Example #1
def process_cl_file(db_path: str, path: str) -> None:
    """
    Process OpenCL file.

    Arguments:
        db_path (str): Path to output database.
        path (str): Path to input file.

    Raises:
        FetchError: In case of IO error.
    """
    db = dbutil.connect(db_path)
    c = db.cursor()

    log.debug("fetch {path}".format(path=fs.abspath(path)))
    try:
        contents = inline_fs_headers(path, [])
    except IOError:
        raise FetchError(
            "cannot read file '{path}'".format(path=fs.abspath(path)))
    c.execute('INSERT OR IGNORE INTO ContentFiles VALUES(?,?)',
              (path, contents))

    db.commit()
    c.close()
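A minimal usage sketch for the function above; the database and kernel paths are hypothetical and the ContentFiles table is assumed to exist already:

try:
    process_cl_file("kernels.db", "kernels/vector_add.cl")
except FetchError:
    print("could not read the input file")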
Example #2
    def __init__(self, path, basecache=None):
        """
        Create a new JSON cache.

        Optionally supports populating the cache with values of an
        existing cache.

        Arguments:
           path (str): Path to persistent JSON cache file.
           basecache (TransientCache, optional): Cache to populate this new
             cache with.
        """

        super(JsonCache, self).__init__()
        self.path = fs.abspath(path)

        if fs.exists(self.path):
            io.debug("Loading cache '{0}'".format(self.path))
            with open(self.path) as file:
                self._data = json.load(file)

        if basecache is not None:
            for key, val in basecache.items():
                self._data[key] = val

        # Register exit handler
        atexit.register(self.write)
Example #3
    def __init__(self, path, basecache=None):
        """
        Create a new JSON cache.

        Optionally supports populating the cache with values of an
        existing cache.

        Arguments:
           path (str): Path to persistent JSON cache file.
           basecache (TransientCache, optional): Cache to populate this new
             cache with.
        """

        super(JsonCache, self).__init__()
        self.path = fs.abspath(path)

        if fs.exists(self.path) and fs.read_file(self.path):
            io.debug("Loading cache '{0}'".format(self.path))
            with open(self.path) as file:
                self._data = json.load(file)

        if basecache is not None:
            for key, val in basecache.items():
                self._data[key] = val

        # Register exit handler
        atexit.register(self.write)
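A short usage sketch for the cache class defined in the two variants above. It assumes the TransientCache base class provides dict-style item access backed by the same self._data mapping the loader populates; the cache path is hypothetical:

c = JsonCache("/tmp/myapp/cache.json")   # loaded from disk if the file exists
c["vocab_size"] = 512                    # stored in the in-memory mapping
c.write()                                # persist now; atexit would also call this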
Example #4
    def from_json(corpus_json: dict):
        """
        Instantiate Corpus from JSON.

        Arguments:
            corpus_json (dict): Specification.

        Returns:
            Corpus: Instantiated corpus.
        """
        path = corpus_json.pop("path", None)
        uid = corpus_json.pop("id", None)

        if path:
            path = unpack_directory_if_needed(fs.abspath(path))
            if not fs.isdir(path):
                raise clgen.UserError(
                    "Corpus path '{}' is not a directory".format(path))
            uid = dirhash(path, 'sha1')
        elif uid:
            cache_path = fs.path(cache.ROOT, "corpus", uid)
            if not fs.isdir(cache_path):
                raise clgen.UserError("Corpus {} not found".format(uid))
        else:
            raise clgen.UserError("No corpus path or ID provided")

        return Corpus(uid, path=path, **corpus_json)
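A hedged example of calling this factory with a made-up specification; any keys other than "path" and "id" are forwarded to the Corpus constructor:

# Hypothetical corpus directory; its sha1 dirhash becomes the corpus id.
corpus = Corpus.from_json({"path": "~/data/opencl_kernels"})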
Example #5
def files_from_list(paths: list) -> list:
    """
    Return a list of all file paths from a list of files or directories.

    For each path in the input: if it is a file, return it; if it is a
    directory, return a list of files in the directory.

    Arguments:
        paths (list of str): List of file and directory paths.

    Returns:
        list of str: Absolute file paths.

    Raises:
        File404: If any of the paths do not exist.
    """
    ret = []
    for path in paths:
        if fs.isfile(path):
            ret.append(fs.abspath(path))
        elif fs.isdir(path):
            ret += [
                f for f in fs.ls(path, abspaths=True, recursive=True)
                if fs.isfile(f)
            ]
        else:
            raise File404(path)
    return ret
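A small usage sketch with hypothetical paths; single files pass through and directories are expanded recursively:

sources = files_from_list(["kernels/vector_add.cl", "benchmarks/"])
for path in sources:
    print(path)            # absolute paths of regular files only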
Example #6
def fetch_fs(db_path: str, paths: list=[]) -> None:
    """
    Fetch from a list of files.

    Arguments:
        db_path (str): Output dataset.
        paths (str[]): List of file paths.
    """
    paths = clgen.files_from_list(paths)  # expand directories

    db = dbutil.connect(db_path)
    c = db.cursor()

    for path in paths:
        log.debug("fetch", path)
        try:
            contents = inline_fs_headers(path, [])
        except IOError:
            db.commit()
            raise FetchError(
                "cannot read file '{path}'".format(path=fs.abspath(path)))
        c.execute('INSERT OR IGNORE INTO ContentFiles VALUES(?,?)',
                  (path, contents))

    db.commit()
Example #7
def main():
    """
    Reduce all databases to oracle.
    """
    dbs = [migrate(_db.Database(path)) for path in
           fs.ls(experiment.DB_DEST, abspaths=True)
           if not re.search("oracle.db$", path)
           and re.search(".db$", path)]
    merge(fs.abspath(experiment.DB_DEST, "oracle.db"),
          dbs, experiment.ORACLE_PATH)
Example #8
def main():
    """
    Reduce all databases to oracle.
    """
    dbs = [
        migrate(_db.Database(path))
        for path in fs.ls(experiment.DB_DEST, abspaths=True)
        if not re.search("oracle.db$", path) and re.search(".db$", path)
    ]
    merge(fs.abspath(experiment.DB_DEST, "oracle.db"), dbs,
          experiment.ORACLE_PATH)
Example #9
def test_cd():
    cwd = os.getcwd()
    new = fs.abspath("..")

    assert new == fs.cd("..")
    assert new == os.getcwd()

    assert cwd == fs.cdpop()
    assert cwd == os.getcwd()

    assert cwd == fs.cdpop()
    assert cwd == os.getcwd()

    assert cwd == fs.cdpop()
    assert cwd == os.getcwd()
Example #10
    def test_cd(self):
        cwd = os.getcwd()
        new = fs.abspath("..")

        self._test(new, fs.cd(".."))
        self._test(new, os.getcwd())

        self._test(cwd, fs.cdpop())
        self._test(cwd, os.getcwd())

        self._test(cwd, fs.cdpop())
        self._test(cwd, os.getcwd())

        self._test(cwd, fs.cdpop())
        self._test(cwd, os.getcwd())
Example #11
    def to_dist(self, distpath: str, author: str = None) -> str:
        """
        Create a dist file.

        Arguments:
            distpath (str): Path to dist file.
            author (str, optional): Author name.

        Returns:
            str: Path to generated distfile.
        """
        outpath = fs.abspath(distpath) + ".tar.bz2"
        if fs.exists(outpath):
            raise DistError("file {} exists".format(outpath))

        meta = self.meta
        if author is not None:
            meta["author"] = author
        log.debug(clgen.format_json(meta))

        try:
            tar = tarfile.open(outpath, 'w:bz2')

            # write meta
            metapath = mktemp(prefix="clgen-", suffix=".json")
            clgen.write_file(metapath, clgen.format_json(meta))
            log.debug("metafile:", metapath)

            # create tarball
            tar.add(metapath, arcname="meta.json")

            # pack contents:
            for path in meta["contents"]:
                abspath = fs.path(cache.ROOT, path)
                log.verbose("packing", abspath)
                tar.add(abspath, arcname=fs.path("contents", path))

            # tidy up
            fs.rm(metapath)
            tar.close()
        except Exception as e:
            tar.close()
            fs.rm(metapath)
            fs.rm(outpath)
            raise e

        return outpath
Example #12
def make(target="all", dir=".", **kwargs):
    """
    Run make.

    Arguments:

        target (str, optional): Name of the target to build. Defaults
          to "all".
        dir (str, optional): Path to directory containing Makefile.
        **kwargs (optional): Any additional arguments to be passed to
          system.run().

    Returns:

        (int, str, str): The first element is the return code of the
          make command. The second and third elements are the stdout
          and stderr of the process.

    Raises:

        NoMakefileError: In case a Makefile is not found in the target
          directory.
        NoTargetError: In case the Makefile does not support the
          requested target.
        MakeError: In case the target rule fails.
    """
    if not fs.isfile(fs.path(dir, "Makefile")):
        raise NoMakefileError("No makefile in '{}'".format(fs.abspath(dir)))

    fs.cd(dir)

    # Default parameters to system.run()
    if "timeout" not in kwargs: kwargs["timeout"] = 300

    ret, out, err = system.run(["make", target], **kwargs)
    fs.cdpop()

    if ret > 0:
        if re.search(_BAD_TARGET_RE, err):
            raise NoTargetError("No rule for target '{}'"
                                .format(target))
        else:
            raise MakeError("Target '{}' failed".format(target))

        raise MakeError("Failed")

    return ret, out, err
Example #13
def make(target="all", dir=".", **kwargs):
    """
    Run make.

    Arguments:

        target (str, optional): Name of the target to build. Defaults
          to "all".
        dir (str, optional): Path to directory containing Makefile.
        **kwargs (optional): Any additional arguments to be passed to
          system.run().

    Returns:

        (int, str, str): The first element is the return code of the
          make command. The second and third elements are the stdout
          and stderr of the process.

    Raises:

        NoMakefileError: In case a Makefile is not found in the target
          directory.
        NoTargetError: In case the Makefile does not support the
          requested target.
        MakeError: In case the target rule fails.
    """
    if not fs.isfile(fs.path(dir, "Makefile")):
        raise NoMakefileError("No makefile in '{}'".format(fs.abspath(dir)))

    fs.cd(dir)

    # Default parameters to system.run()
    if "timeout" not in kwargs: kwargs["timeout"] = 300

    ret, out, err = system.run(["make", target], **kwargs)
    fs.cdpop()

    if ret > 0:
        if re.search(_BAD_TARGET_RE, err):
            raise NoTargetError("No rule for target '{}'".format(target))
        else:
            raise MakeError("Target '{}' failed".format(target))

        raise MakeError("Failed")

    return ret, out, err
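A usage sketch for the two make() variants above, with a hypothetical build/ directory; extra keyword arguments such as timeout are forwarded to system.run():

try:
    # Build the default target, overriding the 300 s default timeout.
    ret, out, err = make(target="all", dir="build", timeout=60)
except NoTargetError:
    print("Makefile has no rule for 'all'")
except MakeError as e:
    print("build failed:", e)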
Example #14
def init_globals(rc_path: Path) -> None:
    global DB_ENGINE
    global DB_HOSTNAME
    global DB_PORT
    global DB_CREDENTIALS
    global DB_DIR
    global DB_BUF_SIZE

    path = fs.abspath(rc_path)

    _config = ConfigParser()
    _config.read(path)
    DB_ENGINE = _config['database']['engine'].lower()
    DB_HOSTNAME = _config['database'].get('hostname', "")
    DB_PORT = _config['database'].get('port', "")
    DB_CREDENTIALS = (_config['database'].get('username', ""),
                      _config['database'].get('password', ""))
    DB_DIR = _config['database'].get("dir", "")
    DB_BUF_SIZE = int(_config['database']['buffer_size'])
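The ConfigParser lookups above imply an INI-style rc file with a single [database] section containing engine, hostname, port, username, password, dir, and buffer_size. A hedged sketch of such a file and of calling the loader (the values are made up; assumes the call runs in the module that defines these globals):

from pathlib import Path

rc = Path("/tmp/dsmith_rc")          # hypothetical location for the rc file
rc.write_text("""\
[database]
engine = mysql
hostname = localhost
port = 3306
username = dsmith
password = secret
dir =
buffer_size = 1024
""")

init_globals(rc)
print(DB_ENGINE, DB_HOSTNAME, DB_PORT, DB_BUF_SIZE)   # -> mysql localhost 3306 1024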
Example #15
    def from_json(corpus_json: dict) -> 'Corpus':
        """
        Instantiate Corpus from JSON.

        Parameters
        ----------
        corpus_json : dict
            Specification.

        Returns
        -------
        Corpus
            Instantiated corpus.
        """
        path = corpus_json.pop("path", None)
        uid = corpus_json.pop("id", None)
        language = clgen.Language.from_str(corpus_json.get("language"))

        if path:
            path = unpack_directory_if_needed(fs.abspath(path))
            if not fs.isdir(path):
                raise clgen.UserError(
                    "Corpus path '{}' is not a directory".format(path))

            dirhashcache = DirHashCache(clgen.cachepath("dirhash.db"), 'sha1')
            uid = prof.profile(dirhashcache.dirhash, path)
        elif uid:
            cache_path = clgen.mkcache("contentfiles",
                                       f"{language}-{uid}").path
            if not fs.isdir(cache_path):
                raise clgen.UserError(
                    "Corpus content {} not found".format(uid))
        else:
            raise clgen.UserError("No corpus path or ID provided")

        if "stats" in corpus_json:  # ignore stats
            del corpus_json["stats"]

        if "contentfiles" in corpus_json:
            del corpus_json["contentfiles"]

        return prof.profile(Corpus, uid, path=path, **corpus_json)
Example #16
def unpack_archive(*components, **kwargs) -> str:
    """
    Unpack a compressed archive.

    Arguments:
        *components (str[]): Absolute path.
        **kwargs (dict, optional): Set "compression" to compression type.
            Default: bz2. Set "dir" to destination directory. Defaults to the
            directory of the archive.

    Returns:
        str: Path to directory.
    """
    path = fs.abspath(*components)
    compression = kwargs.get("compression", "bz2")
    dir = kwargs.get("dir", fs.dirname(path))

    fs.cd(dir)
    tar = tarfile.open(path, "r:" + compression)
    tar.extractall()
    tar.close()
    fs.cdpop()

    return dir
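A short usage sketch with hypothetical archive paths; by default the archive is unpacked into its own directory:

# Unpack downloads/foo.tar.bz2 in place (bz2 is the default compression).
out_dir = unpack_archive("downloads", "foo.tar.bz2")

# Unpack a gzip archive into an explicit destination directory.
out_dir = unpack_archive("downloads/bar.tar.gz", compression="gz", dir="/tmp/bar")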
Example #17
#!/usr/bin/env python3
from collections import namedtuple
from typing import NewType

import pyopencl as cl
from dsmith.db import *
from dsmith.lib import *

from labm8 import fs


# paths to clreduce library
CLREDUCE_DIR = fs.abspath('..', 'lib', 'clreduce')
CLSMITH_DIR = fs.abspath('..', 'lib', 'CLSmith', 'build')

# FIXME:
# CL_LAUNCHER = fs.abspath('../lib/clsmith/build/cl_launcher')
# CLSMITH_HEADERS = [path for path in fs.ls(CLSMITH_DIR, abspaths=True) if path.endswith('.h')]
# CLSMITH_RUNTIME_DIR = fs.abspath('..', 'lib', 'CLSmith', 'runtime')
# CREDUCE = fs.abspath(CLREDUCE_DIR, 'build_creduce', 'creduce', 'creduce')
# INTERESTING_TEST = fs.abspath(CLREDUCE_DIR, 'interestingness_tests', 'wrong_code_bug.py')
# OCLGRIND = fs.abspath('../lib/clgen/native/oclgrind/c3760d07365b74ccda04cd361e1b567a6d99dd8c/install/bin/oclgrind')

status_t = NewType('status_t', int)
return_t = namedtuple('return_t', ['runtime', 'status', 'log', 'src'])


def get_platform_name(platform_id):
  platform = cl.get_platforms()[platform_id]
  return platform.get_info(cl.platform_info.NAME)
Example #18
def load_config(path="~/.omnitunerc.json"):
  path = fs.abspath(path)
  if fs.isfile(path):
    return json.load(open(path))
  else:
    raise ConfigNotFoundError("File '{}' not found!".format(path))
Example #19
                        help="MySQL database hostname")
    parser.add_argument("clang", type=str, help="clang version")
    parser.add_argument("--clsmith",
                        action="store_true",
                        help="Only reduce CLSmith results")
    parser.add_argument("--clgen",
                        action="store_true",
                        help="Only reduce CLgen results")
    parser.add_argument("--recheck",
                        action="store_true",
                        help="Re-check existing errors")
    args = parser.parse_args()

    db.init(args.hostname)  # initialize db engine

    clang = fs.abspath(f"../lib/llvm/build/{args.clang}/bin/clang")

    if not args.recheck and not fs.isfile(clang):
        print(f"fatal: clang '{clang}' does not exist")
        sys.exit(1)

    if args.clgen and args.clsmith:
        tablesets = [CLSMITH_TABLES, CLGEN_TABLES]
    elif args.clsmith:
        tablesets = [CLSMITH_TABLES]
    elif args.clgen:
        tablesets = [CLGEN_TABLES]
    else:
        tablesets = [CLSMITH_TABLES, CLGEN_TABLES]

    with Session(commit=True) as s:
Example #20
def _main(db_file: Path, github: bool) -> None:
    """
    Create an empty OpenCL kernel database.
    """
    dbutil.create_db(db_file, github)
    print(fs.abspath(db_file))
Example #21
    def test_basename(self):
        self._test("foo", fs.basename("foo"))
        self._test("foo", fs.basename(fs.abspath("foo")))
Example #22
    def test_abspath_homedir(self):
        self._test(os.path.expanduser("~"), fs.abspath("~"))
        self._test(os.path.join(os.path.expanduser("~"), "foo"),
                   fs.abspath("~", "foo"))
Example #23
    def test_abspath(self):
        self._test(os.path.abspath(".") + "/foo/bar",
                   fs.abspath("foo", "bar"))
        self._test(os.path.abspath(".") + "/foo/bar/car",
                   fs.abspath("foo/bar", "car"))
Example #24
def test_basename():
    assert "foo" == fs.basename("foo")
    assert "foo" == fs.basename(fs.abspath("foo"))
Example #25
def load_config(path="~/.omnitunerc.json"):
    path = fs.abspath(path)
    if fs.isfile(path):
        return json.load(open(path))
    else:
        raise ConfigNotFoundError("File '{}' not found!".format(path))
Example #26
def test_abspath():
    assert (os.path.abspath(".") + "/foo/bar" == fs.abspath("foo", "bar"))
    assert (os.path.abspath(".") + "/foo/bar/car" == fs.abspath(
        "foo/bar", "car"))
Example #27
def test_abspath_homedir():
    assert os.path.expanduser("~") == fs.abspath("~")
    assert (os.path.join(os.path.expanduser("~"),
                         "foo") == fs.abspath("~", "foo"))
Example #28
def _execute(statement: str, file=sys.stdout) -> None:
    if not isinstance(statement, str):
        raise TypeError

    # parsing is case insensitive
    statement = re.sub(r"\s+", " ", statement.strip().lower())
    components = statement.split(" ")

    if not statement:
        return

    # Parse command modifiers:
    if components[0] == "debug":
        statement = re.sub(r'^debug ', '', statement)
        with dsmith.debug_scope():
            return _execute(statement, file=file)
    elif components[0] == "verbose":
        components = components[1:]
        statement = re.sub(r'^verbose ', '', statement)
        with dsmith.verbose_scope():
            return _execute(statement, file=file)

    csv = ", ".join(f"'{x}'" for x in components)
    logging.debug(f"parsing input [{csv}]")

    # Full command parser:
    if len(components) == 1 and re.match(r'(hi|hello|hey)', components[0]):
        return _hello(file=file)

    if len(components) == 1 and re.match(r'(exit|quit)', components[0]):
        return _exit(file=file)

    if len(components) == 1 and components[0] == "help":
        return _help(file=file)

    if len(components) == 1 and components[0] == "version":
        return _version(file=file)

    if len(components) == 1 and components[0] == "test":
        return _test(file=file)

    if components[0] == "describe":
        generators_match = re.match(r'describe (?P<lang>\w+) generators$',
                                    statement)
        testbeds_match = re.match(
            r'describe (?P<available>available )?(?P<lang>\w+) testbeds$',
            statement)
        programs_match = re.match(r'describe (?P<lang>\w+) programs$',
                                  statement)
        testcases_match = re.match(
            r'describe (?P<lang>\w+) ((?P<generator>\w+) )?testcases$',
            statement)
        results_match = re.match(r'describe (?P<lang>\w+) results$', statement)

        if generators_match:
            lang = mklang(generators_match.group("lang"))
            return _describe_generators(lang=lang, file=file)
        elif testbeds_match:
            lang = mklang(testbeds_match.group("lang"))
            available_only = True if testbeds_match.group(
                "available") else False
            return lang.describe_testbeds(available_only=available_only,
                                          file=file)
        elif programs_match:
            lang = mklang(programs_match.group("lang"))
            return _describe_programs(lang=lang, file=file)
        elif testcases_match:
            lang = mklang(testcases_match.group("lang"))
            gen = testcases_match.group("generator")
            if gen:
                generator = lang.mkgenerator(gen)
                return _describe_testcases(lang=lang,
                                           generator=generator,
                                           file=file)
            else:
                for generator in lang.generators:
                    _describe_testcases(lang=lang,
                                        generator=generator,
                                        file=file)
                return
        elif results_match:
            lang = mklang(results_match.group("lang"))
            return lang.describe_results(file=file)
        else:
            raise UnrecognizedInput

    if components[0] == "make":
        programs_match = re.match(
            r'make ((?P<up_to>up to )?(?P<number>\d+) )?(?P<lang>\w+) program(s)?( using ('
            r'?P<generator>\w+))?$', statement)
        testcases_match = re.match(
            r'make (?P<lang>\w+) ((?P<harness>\w+):(?P<generator>\w+)? )?testcases$',
            statement)

        if programs_match:
            number = int(programs_match.group("number") or 0) or math.inf
            lang = mklang(programs_match.group("lang"))
            generator = lang.mkgenerator(programs_match.group("generator"))

            return _make_programs(
                lang=lang,
                generator=generator,
                n=number,
                up_to=True if programs_match.group("up_to") else False,
                file=file)

        elif testcases_match:
            lang = mklang(testcases_match.group("lang"))
            if testcases_match.group("harness"):
                harness = lang.mkharness(testcases_match.group("harness"))
                if testcases_match.group("generator"):
                    generators = [
                        lang.mkgenerator(testcases_match.group("generator"))
                    ]
                else:
                    # No generator specified, use all:
                    generators = list(harness.generators)

                for generator in generators:
                    harness.make_testcases(generator)
            else:
                # No harness specified, use all:
                for harness in lang.harnesses:
                    for generator in harness.generators:
                        harness.make_testcases(generator)
            return
        else:
            raise UnrecognizedInput

    if components[0] == "import":
        match = re.match(
            r'import (?P<generator>\w+) (?P<lang>\w+) program(s)? from (?P<path>.+)$',
            statement)

        if match:
            lang = mklang(match.group("lang"))
            generator = lang.mkgenerator(match.group("generator"))
            path = fs.abspath(match.group("path"))
            if not fs.isdir(path):
                raise ValueError(f"'{path}' is not a directory")

            return generator.import_from_dir(path)
        else:
            raise UnrecognizedInput

    if components[0] == "run":
        match = re.match(
            r'run (?P<lang>\w+) ((?P<harness>\w+):(?P<generator>\w+)? )?testcases( on (?P<testbed>['
            r'\w+-±]+))?$', statement)
        if match:
            lang = mklang(match.group("lang"))

            if match.group("harness"):
                harness = lang.mkharness(match.group("harness"))
                if match.group("generator"):
                    generators = [lang.mkgenerator(match.group("generator"))]
                else:
                    # No generator specified, use all:
                    generators = list(harness.generators)

                pairs = [(harness, generator) for generator in generators]
            else:
                pairs = []
                # No harness specified, use all:
                for harness in lang.harnesses:
                    pairs += [(harness, generator)
                              for generator in harness.generators]

            for harness, generator in pairs:
                if match.group("testbed"):
                    testbeds = lang.mktestbeds(match.group("testbed"))
                else:
                    testbeds = harness.available_testbeds()

                for testbed in testbeds:
                    testbed.run_testcases(harness, generator)
            return
        else:
            raise UnrecognizedInput

    if components[0] == "difftest":
        match = re.match(r'difftest (?P<lang>\w+) results$', statement)
        lang = mklang(match.group("lang"))

        return lang.difftest()

    raise UnrecognizedInput
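Based on the regular expressions above, a few statements the parser accepts; the language, harness, generator, and testbed names are hypothetical and are resolved by mklang() and the related factory helpers:

_execute("verbose describe opencl generators")              # modifier prefix is peeled off first
_execute("make 1000 opencl programs using clgen")           # bounded program generation
_execute("make opencl cl_launcher:clsmith testcases")       # harness:generator testcase creation
_execute("run opencl cl_launcher:clsmith testcases on 1+")  # run on a named testbed
_execute("difftest opencl results")                         # differential-test the results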
Example #29
def main(self, args: List[str] = sys.argv[1:]):
  """
  Compiler fuzzing through deep learning.
  """
  parser = ArgumentParser(
      prog="dsmith",
      description=inspect.getdoc(self),
      epilog=__help_epilog__,
      formatter_class=RawDescriptionHelpFormatter)

  parser.add_argument(
      "--config", metavar="<path>", type=FileType("r"), dest="rc_path",
      help=f"path to configuration file (default: '{dsmith.RC_PATH}')")
  parser.add_argument(
      "-v", "--verbose", action="store_true",
      help="increase output verbosity")
  parser.add_argument(
      "--debug", action="store_true",
      help="debugging output verbosity")
  parser.add_argument(
      "--db-debug", action="store_true",
      help="additional database debugging output")
  parser.add_argument(
      "--version", action="store_true",
      help="show version information and exit")
  parser.add_argument(
      "--profile", action="store_true",
      help=("enable internal API profiling. When combined with --verbose, "
            "prints a complete profiling trace"))
  parser.add_argument("command", metavar="<command>", nargs="*",
                      help=("command to run. If not given, run an "
                            "interactive prompt"))

  args = parser.parse_args(args)

  # set log level
  if args.debug:
    loglvl = logging.DEBUG
    os.environ["DEBUG"] = "1"

    # verbose stack traces. see: https://pymotw.com/2/cgitb/
    import cgitb
    cgitb.enable(format='text')
  elif args.verbose:
    loglvl = logging.INFO
  else:
    loglvl = logging.WARNING

  # set database log level
  if args.db_debug:
    os.environ["DB_DEBUG"] = "1"

  # configure logger
  logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',
                      level=loglvl)

  # set profile option
  if args.profile:
    prof.enable()

  # load custom config:
  if args.rc_path:
    path = fs.abspath(args.rc_path.name)
    logging.debug(
        f"loading configuration file '{Colors.BOLD}{path}{Colors.END}'")
    dsmith.init_globals(args.rc_path.name)

  # options which override the normal argument parsing process.
  if args.version:
    print(dsmith.__version_str__)
  else:
    if len(args.command):
      # if a command was given, run it
      run_command(" ".join(args.command))
    else:
      # no command was given, fallback to interactive prompt
      repl()