def __init__(self, path, basecache=None):
    """
    Create a new JSON cache.

    Optionally supports populating the cache with values of an
    existing cache.

    Arguments:
        path (str): Path to the JSON file backing this cache.
        basecache (TransientCache, optional): Cache to populate this
            new cache with.
    """
    super(JsonCache, self).__init__()
    self.path = fs.abspath(path)

    # Restore previously persisted entries, but only if the backing
    # file exists and is non-empty (json.load would choke on "").
    if fs.exists(self.path) and fs.read_file(self.path):
        io.debug("Loading cache '{0}'".format(self.path))
        with open(self.path) as infile:
            self._data = json.load(infile)

    # Entries from the base cache overwrite any loaded from disk.
    if basecache is not None:
        self._data.update(basecache.items())

    # Persist the cache back to disk at interpreter exit.
    atexit.register(self.write)
def test_cd():
    """fs.cd() pushes onto a directory stack; fs.cdpop() pops it, and
    popping an empty stack stays at the starting directory."""
    start = os.getcwd()
    parent = fs.abspath("..")

    assert fs.cd("..") == parent
    assert os.getcwd() == parent

    # One real pop returns to the start; further pops are no-ops that
    # keep reporting the starting directory.
    for _ in range(3):
        assert fs.cdpop() == start
        assert os.getcwd() == start
def make(target="all", dir=".", **kwargs):
    """
    Run make.

    Arguments:
        target (str, optional): Name of the target to build. Defaults
            to "all".
        dir (str, optional): Path to directory containing Makefile.
        **kwargs (optional): Any additional arguments to be passed to
            system.run().

    Returns:
        (int, str, str): The first element is the return code of the
            make command. The second and third elements are the stdout
            and stderr of the process.

    Raises:
        NoMakefileError: In case a Makefile is not found in the target
            directory.
        NoTargetError: In case the Makefile does not support the
            requested target.
        MakeError: In case the target rule fails.
    """
    if not fs.isfile(fs.path(dir, "Makefile")):
        raise NoMakefileError("No makefile in '{}'".format(fs.abspath(dir)))

    # Default parameters to system.run()
    if "timeout" not in kwargs:
        kwargs["timeout"] = 300

    fs.cd(dir)
    try:
        ret, out, err = system.run(["make", target], **kwargs)
    finally:
        # Fix: restore the working directory even if system.run()
        # raises (e.g. on timeout); previously cdpop() was skipped.
        fs.cdpop()

    if ret > 0:
        if re.search(_BAD_TARGET_RE, err):
            raise NoTargetError("No rule for target '{}'".format(target))
        # NOTE: a trailing unconditional `raise MakeError("Failed")`
        # was unreachable (both branches above raise) and was removed.
        raise MakeError("Target '{}' failed".format(target))

    return ret, out, err
def init_globals(rc_path: Path) -> None:
    """Populate the module-level database settings from an rc file.

    Reads the [database] section of the config file at *rc_path* and
    assigns the DB_* module globals. Missing optional keys default to
    the empty string; 'engine' and 'buffer_size' are required.
    """
    global DB_ENGINE
    global DB_HOSTNAME
    global DB_PORT
    global DB_CREDENTIALS
    global DB_DIR
    global DB_BUF_SIZE

    config = ConfigParser()
    config.read(fs.abspath(rc_path))
    section = config['database']

    DB_ENGINE = section['engine'].lower()
    DB_HOSTNAME = section.get('hostname', "")
    DB_PORT = section.get('port', "")
    DB_CREDENTIALS = (section.get('username', ""),
                      section.get('password', ""))
    DB_DIR = section.get("dir", "")
    DB_BUF_SIZE = int(section['buffer_size'])
def unpack_archive(*components, **kwargs) -> str:
    """
    Unpack a compressed archive.

    Arguments:
        *components (str[]): Absolute path.
        **kwargs (dict, optional): Set "compression" to compression
            type. Default: bz2. Set "dir" to destination directory.
            Defaults to the directory of the archive.

    Returns:
        str: Path to directory.
    """
    path = fs.abspath(*components)
    compression = kwargs.get("compression", "bz2")
    dest = kwargs.get("dir", fs.dirname(path))

    fs.cd(dest)
    try:
        # Fix: use context managers so the tarfile is closed and the
        # working directory restored even if extraction fails.
        # NOTE(review): extractall() on an untrusted archive permits
        # "../" path traversal; callers must trust the archive source.
        with tarfile.open(path, "r:" + compression) as tar:
            tar.extractall()
    finally:
        fs.cdpop()

    return dest
help="MySQL database hostname") parser.add_argument("clang", type=str, help="clang version") parser.add_argument("--clsmith", action="store_true", help="Only reduce CLSmith results") parser.add_argument("--clgen", action="store_true", help="Only reduce CLgen results") parser.add_argument("--recheck", action="store_true", help="Re-check existing errors") args = parser.parse_args() db.init(args.hostname) # initialize db engine clang = fs.abspath(f"../lib/llvm/build/{args.clang}/bin/clang") if not args.recheck and not fs.isfile(clang): print(f"fatal: clang '{clang}' does not exist") sys.exit(1) if args.clgen and args.clsmith: tablesets = [CLSMITH_TABLES, CLGEN_TABLES] elif args.clsmith: tablesets = [CLSMITH_TABLES] elif args.clgen: tablesets = [CLGEN_TABLES] else: tablesets = [CLSMITH_TABLES, CLGEN_TABLES] with Session(commit=True) as s:
def test_basename():
    """basename returns the final path component, whether the input
    path is relative or absolute."""
    for candidate in ("foo", fs.abspath("foo")):
        assert fs.basename(candidate) == "foo"
def test_abspath_homedir():
    """'~' expands to the user's home directory, with extra components
    joined beneath it."""
    home = os.path.expanduser("~")
    assert fs.abspath("~") == home
    assert fs.abspath("~", "foo") == os.path.join(home, "foo")
def test_abspath():
    """Relative components are joined and resolved against the cwd."""
    here = os.path.abspath(".")
    assert fs.abspath("foo", "bar") == here + "/foo/bar"
    assert fs.abspath("foo/bar", "car") == here + "/foo/bar/car"
def _execute(statement: str, file=sys.stdout) -> None:
    """
    Parse and execute a single interactive dsmith command.

    Arguments:
        statement (str): The raw command string. Parsing is
            case-insensitive and whitespace-normalized.
        file: Output stream that command handlers write to.

    Raises:
        TypeError: If statement is not a string.
        UnrecognizedInput: If the statement cannot be parsed.
    """
    if not isinstance(statement, str):
        raise TypeError

    # parsing is case insensitive. Fix: raw string for the pattern so
    # '\s' is a regex escape, not an (invalid) string escape.
    statement = re.sub(r"\s+", " ", statement.strip().lower())
    components = statement.split(" ")

    if not statement:
        return

    # Parse command modifiers: "debug"/"verbose" prefixes strip the
    # modifier and re-dispatch the remainder inside the matching scope.
    if components[0] == "debug":
        statement = re.sub(r'^debug ', '', statement)
        with dsmith.debug_scope():
            return _execute(statement, file=file)
    elif components[0] == "verbose":
        components = components[1:]
        statement = re.sub(r'^verbose ', '', statement)
        with dsmith.verbose_scope():
            return _execute(statement, file=file)

    csv = ", ".join(f"'{x}'" for x in components)
    logging.debug(f"parsing input [{csv}]")

    # Full command parser: single-word commands first.
    if len(components) == 1 and re.match(r'(hi|hello|hey)', components[0]):
        return _hello(file=file)

    if len(components) == 1 and re.match(r'(exit|quit)', components[0]):
        return _exit(file=file)

    if len(components) == 1 and components[0] == "help":
        return _help(file=file)

    if len(components) == 1 and components[0] == "version":
        return _version(file=file)

    if len(components) == 1 and components[0] == "test":
        return _test(file=file)

    if components[0] == "describe":
        generators_match = re.match(r'describe (?P<lang>\w+) generators$',
                                    statement)
        testbeds_match = re.match(
            r'describe (?P<available>available )?(?P<lang>\w+) testbeds$',
            statement)
        programs_match = re.match(r'describe (?P<lang>\w+) programs$',
                                  statement)
        testcases_match = re.match(
            r'describe (?P<lang>\w+) ((?P<generator>\w+) )?testcases$',
            statement)
        results_match = re.match(r'describe (?P<lang>\w+) results$',
                                 statement)

        if generators_match:
            lang = mklang(generators_match.group("lang"))
            return _describe_generators(lang=lang, file=file)
        elif testbeds_match:
            lang = mklang(testbeds_match.group("lang"))
            available_only = True if testbeds_match.group(
                "available") else False
            return lang.describe_testbeds(available_only=available_only,
                                          file=file)
        elif programs_match:
            lang = mklang(programs_match.group("lang"))
            return _describe_programs(lang=lang, file=file)
        elif testcases_match:
            lang = mklang(testcases_match.group("lang"))
            gen = testcases_match.group("generator")
            if gen:
                generator = lang.mkgenerator(gen)
                return _describe_testcases(lang=lang, generator=generator,
                                           file=file)
            else:
                # No generator specified: describe testcases for all.
                for generator in lang.generators:
                    _describe_testcases(lang=lang, generator=generator,
                                        file=file)
                return
        elif results_match:
            lang = mklang(results_match.group("lang"))
            return lang.describe_results(file=file)
        else:
            raise UnrecognizedInput

    if components[0] == "make":
        programs_match = re.match(
            r'make ((?P<up_to>up to )?(?P<number>\d+) )?(?P<lang>\w+) '
            r'program(s)?( using (?P<generator>\w+))?$',
            statement)
        testcases_match = re.match(
            r'make (?P<lang>\w+) ((?P<harness>\w+):(?P<generator>\w+)? )?testcases$',
            statement)

        if programs_match:
            # No count given means "keep making programs forever".
            number = int(programs_match.group("number") or 0) or math.inf
            lang = mklang(programs_match.group("lang"))
            # NOTE(review): group("generator") may be None here;
            # presumably mkgenerator() maps None to a default — confirm.
            generator = lang.mkgenerator(programs_match.group("generator"))
            return _make_programs(
                lang=lang, generator=generator, n=number,
                up_to=True if programs_match.group("up_to") else False,
                file=file)
        elif testcases_match:
            lang = mklang(testcases_match.group("lang"))
            if testcases_match.group("harness"):
                harness = lang.mkharness(testcases_match.group("harness"))
                if testcases_match.group("generator"):
                    generators = [
                        lang.mkgenerator(testcases_match.group("generator"))
                    ]
                else:
                    # No generator specified, use all:
                    generators = list(harness.generators)

                for generator in generators:
                    harness.make_testcases(generator)
            else:
                # No harness specified, use all:
                for harness in lang.harnesses:
                    for generator in harness.generators:
                        harness.make_testcases(generator)
            return
        else:
            raise UnrecognizedInput

    if components[0] == "import":
        match = re.match(
            r'import (?P<generator>\w+) (?P<lang>\w+) program(s)? '
            r'from (?P<path>.+)$',
            statement)
        if match:
            lang = mklang(match.group("lang"))
            generator = lang.mkgenerator(match.group("generator"))
            path = fs.abspath(match.group("path"))
            if not fs.isdir(path):
                raise ValueError(f"'{path}' is not a directory")
            return generator.import_from_dir(path)
        else:
            raise UnrecognizedInput

    if components[0] == "run":
        match = re.match(
            r'run (?P<lang>\w+) ((?P<harness>\w+):(?P<generator>\w+)? )?testcases'
            r'( on (?P<testbed>[\w+-±]+))?$',
            statement)
        if match:
            lang = mklang(match.group("lang"))

            if match.group("harness"):
                harness = lang.mkharness(match.group("harness"))
                if match.group("generator"):
                    generators = [lang.mkgenerator(match.group("generator"))]
                else:
                    # No generator specified, use all:
                    generators = list(harness.generators)
                pairs = [(harness, generator) for generator in generators]
            else:
                pairs = []
                # No harness specified, use all:
                for harness in lang.harnesses:
                    pairs += [(harness, generator)
                              for generator in harness.generators]

            for harness, generator in pairs:
                if match.group("testbed"):
                    testbeds = lang.mktestbeds(match.group("testbed"))
                else:
                    testbeds = harness.available_testbeds()

                for testbed in testbeds:
                    testbed.run_testcases(harness, generator)
            return
        else:
            raise UnrecognizedInput

    if components[0] == "difftest":
        match = re.match(r'difftest (?P<lang>\w+) results$', statement)
        # Fix: a non-matching statement previously raised AttributeError
        # on match.group(); treat it as unrecognized input instead.
        if not match:
            raise UnrecognizedInput
        lang = mklang(match.group("lang"))
        return lang.difftest()

    raise UnrecognizedInput
def load_config(path="~/.omnitunerc.json"):
    """
    Load JSON configuration from file.

    Arguments:
        path (str, optional): Path to configuration file. "~" is
            expanded to the user's home directory.

    Returns:
        The deserialized JSON configuration.

    Raises:
        ConfigNotFoundError: If no file exists at the given path.
    """
    path = fs.abspath(path)
    if not fs.isfile(path):
        raise ConfigNotFoundError("File '{}' not found!".format(path))
    # Fix: use a context manager so the file handle is closed promptly
    # (previously `json.load(open(path))` leaked the handle).
    with open(path) as infile:
        return json.load(infile)
#!/usr/bin/env python3 from collections import namedtuple import pyopencl as cl from dsmith.db import * from dsmith.lib import * from phd.lib.labm8 import fs from typing import NewType # paths to clreduce library CLREDUCE_DIR = fs.abspath('..', 'lib', 'clreduce') CLSMITH_DIR = fs.abspath('..', 'lib', 'CLSmith', 'build') # FIXME: # CL_LAUNCHER = fs.abspath('../lib/clsmith/build/cl_launcher') # CLSMITH_HEADERS = [path for path in fs.ls(CLSMITH_DIR, abspaths=True) if path.endswith('.h')] # CLSMITH_RUNTIME_DIR = fs.abspath('..', 'lib', 'CLSmith', 'runtime') # CREDUCE = fs.abspath(CLREDUCE_DIR, 'build_creduce', 'creduce', 'creduce') # INTERESTING_TEST = fs.abspath(CLREDUCE_DIR, 'interestingness_tests', 'wrong_code_bug.py') # OCLGRIND = fs.abspath('../lib/clgen/native/oclgrind/c3760d07365b74ccda04cd361e1b567a6d99dd8c/install/bin/oclgrind') status_t = NewType('status_t', int) return_t = namedtuple('return_t', ['runtime', 'status', 'log', 'src']) def get_platform_name(platform_id): platform = cl.get_platforms()[platform_id] return platform.get_info(cl.platform_info.NAME) def get_device_name(platform_id, device_id):
def main(self, args: List[str] = None):
    """ Compiler fuzzing through deep learning.

    Parses command-line arguments, configures logging/profiling, loads
    any custom configuration file, then either runs the given command
    or drops into an interactive prompt.

    Arguments:
        args (List[str], optional): Command-line arguments. Defaults to
            sys.argv[1:], resolved at call time.
    """
    # Fix: the old default `args=sys.argv[1:]` bound sys.argv at class
    # definition time, so later changes to sys.argv (e.g. in tests)
    # were ignored. Resolve it lazily instead.
    if args is None:
        args = sys.argv[1:]

    parser = ArgumentParser(prog="dsmith",
                            description=inspect.getdoc(self),
                            epilog=__help_epilog__,
                            formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument(
        "--config", metavar="<path>", type=FileType("r"), dest="rc_path",
        help=f"path to configuration file (default: '{dsmith.RC_PATH}')")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="increase output verbosity")
    parser.add_argument("--debug", action="store_true",
                        help="debugging output verbosity")
    parser.add_argument("--db-debug", action="store_true",
                        help="additional database debugging output")
    parser.add_argument("--version", action="store_true",
                        help="show version information and exit")
    parser.add_argument(
        "--profile", action="store_true",
        help=("enable internal API profiling. When combined with --verbose, "
              "prints a complete profiling trace"))
    parser.add_argument("command", metavar="<command>", nargs="*",
                        help=("command to run. If not given, run an "
                              "interactive prompt"))
    args = parser.parse_args(args)

    # set log level
    if args.debug:
        loglvl = logging.DEBUG
        os.environ["DEBUG"] = "1"

        # verbose stack traces. see: https://pymotw.com/2/cgitb/
        import cgitb
        cgitb.enable(format='text')
    elif args.verbose:
        loglvl = logging.INFO
    else:
        loglvl = logging.WARNING

    # set database log level
    if args.db_debug:
        os.environ["DB_DEBUG"] = "1"

    # configure logger
    logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',
                        level=loglvl)

    # set profile option
    if args.profile:
        prof.enable()

    # load custom config:
    if args.rc_path:
        path = fs.abspath(args.rc_path.name)
        logging.debug(
            f"loading configuration file '{Colors.BOLD}{path}{Colors.END}'")
        dsmith.init_globals(args.rc_path.name)

    # options which override the normal argument parsing process.
    if args.version:
        print(dsmith.__version_str__)
    else:
        if len(args.command):
            # if a command was given, run it
            run_command(" ".join(args.command))
        else:
            # no command was given, fallback to interactive prompt
            repl()