def find_compilers(*path):
    """Return a list of compilers found in the supplied paths.

    This invokes the find() method for each Compiler class, and
    appends the compilers detected to a list.
    """
    # Make sure path elements exist, and include /bin directories
    # under prefixes.
    filtered_path = []
    for p in path:
        # Eliminate symlinks and just take the real directories.
        p = os.path.realpath(p)
        if not os.path.isdir(p):
            continue
        filtered_path.append(p)

        # Check for a bin directory, add it if it exists.
        # ('bin_dir', not 'bin', to avoid shadowing the builtin bin()).
        bin_dir = join_path(p, 'bin')
        if os.path.isdir(bin_dir):
            filtered_path.append(os.path.realpath(bin_dir))

    # Once the paths are cleaned up, do a search for each type of
    # compiler.  We can spawn a bunch of parallel searches to reduce
    # the overhead of spelunking all these directories.
    types = all_compiler_types()
    compiler_lists = parmap(lambda cls: cls.find(*filtered_path), types)

    # Ensure all the version calls we made are cached in the parent
    # process, as well.  This speeds up Spack a lot.
    # Flatten with a comprehension rather than reduce(): 'reduce' is not
    # a builtin on Python 3, and repeated list concatenation is O(n^2).
    clist = [compiler for sublist in compiler_lists for compiler in sublist]
    return clist
def find_compilers(*path):
    """Return a list of compilers found in the supplied paths.

    This invokes the find() method for each Compiler class, and
    appends the compilers detected to a list.
    """
    # Make sure path elements exist, and include /bin directories
    # under prefixes.
    filtered_path = []
    for p in path:
        # Eliminate symlinks and just take the real directories.
        p = os.path.realpath(p)
        if not os.path.isdir(p):
            continue
        filtered_path.append(p)

        # Check for a bin directory, add it if it exists.
        # ('bin_dir', not 'bin', to avoid shadowing the builtin bin()).
        bin_dir = join_path(p, 'bin')
        if os.path.isdir(bin_dir):
            filtered_path.append(os.path.realpath(bin_dir))

    # Once the paths are cleaned up, do a search for each type of
    # compiler.  We can spawn a bunch of parallel searches to reduce
    # the overhead of spelunking all these directories.
    types = all_compiler_types()
    compiler_lists = parmap(lambda cls: cls.find(*filtered_path), types)

    # Ensure all the version calls we made are cached in the parent
    # process, as well.  This speeds up Spack a lot.
    # A flattening comprehension replaces reduce(): 'reduce' is not a
    # builtin on Python 3 and chained list '+' is quadratic.
    clist = [c for sub in compiler_lists for c in sub]
    return clist
def find(cls, *path):
    """Locate compilers of this type in the user's environment.

    Returns compiler objects with the cc, cxx, f77, fc paths and the
    version filled in for each set of compilers found.  Candidates are
    matched against cc_names, cxx_names, etc. and grouped when they
    share a prefix, suffix, and version (e.g., gcc-mp-4.7 groups with
    g++-mp-4.7 and gfortran-mp-4.7).
    """
    # One search per language, run in parallel.
    search_args = [
        (cls.cc_names, cls.cc_version) + tuple(path),
        (cls.cxx_names, cls.cxx_version) + tuple(path),
        (cls.f77_names, cls.f77_version) + tuple(path),
        (cls.fc_names, cls.fc_version) + tuple(path)]
    dicts = parmap(lambda args: cls._find_matches_in_path(*args),
                   search_args)

    # Union of every (version, prefix, suffix) key seen for any language.
    all_keys = set()
    for per_lang in dicts:
        all_keys.update(per_lang)

    compilers = []
    for key in all_keys:
        ver, pre, suf = key
        # One path per language; None where that language wasn't found.
        compiler_paths = tuple(per_lang.get(key) for per_lang in dicts)
        spec = spack.spec.CompilerSpec(cls.name, ver)
        compilers.append(cls(spec, *compiler_paths))

    return compilers
def find(cls, *path):
    """Find compilers of this type in the given paths (or the environment).

    For each set of compiler executables found, returns a compiler
    object with its cc, cxx, f77, fc paths and version filled in.
    Executables are matched against cc_names, cxx_names, etc. and
    grouped by common prefix, suffix, and version — e.g., gcc-mp-4.7
    is grouped with g++-mp-4.7 and gfortran-mp-4.7.
    """
    language_specs = [(cls.cc_names, cls.cc_version),
                      (cls.cxx_names, cls.cxx_version),
                      (cls.f77_names, cls.f77_version),
                      (cls.fc_names, cls.fc_version)]

    # Run the per-language searches in parallel.
    dicts = parmap(
        lambda args: cls._find_matches_in_path(*args),
        [names_and_version + tuple(path)
         for names_and_version in language_specs])

    # Every (version, prefix, suffix) key found for any language.
    all_keys = set().union(*dicts)

    compilers = []
    for ver, pre, suf in all_keys:
        key = (ver, pre, suf)
        found_paths = tuple(d.get(key) for d in dicts)
        compilers.append(
            cls(spack.spec.CompilerSpec(cls.name, ver), *found_paths))

    return compilers
def find_compilers(self, *paths):
    """Return all compilers of every known type found in *paths*.

    Runs find_compiler() for each registered Compiler class and
    concatenates the results.
    """
    types = spack.compilers.all_compiler_types()
    compiler_lists = parmap(
        lambda cmp_cls: self.find_compiler(cmp_cls, *paths), types)

    # Ensure all the version calls we made are cached in the parent
    # process, as well.  This speeds up Spack a lot.
    # Flatten with a comprehension instead of reduce(): 'reduce' is not
    # a builtin on Python 3 (and no functools import is in evidence),
    # and repeated list '+' is quadratic.
    clist = [comp for cl in compiler_lists for comp in cl]
    return clist
def find_compilers(self, *paths):
    """Search *paths* with every known compiler type; return all matches."""
    compiler_types = spack.compilers.all_compiler_types()
    found_per_type = parmap(
        lambda compiler_cls: self.find_compiler(compiler_cls, *paths),
        compiler_types)

    # Flattening here also ensures the version calls made during the
    # search are cached in the parent process, which speeds up Spack
    # a lot.
    flattened = []
    for group in found_per_type:
        flattened.extend(group)
    return flattened
def find(cls, *path):
    """Discover compilers of this type in the user's environment.

    Returns compiler objects with the cc, cxx, f77, fc paths and the
    version filled in.  Executables matching cc_names, cxx_names, etc.
    are grouped when they share a prefix, suffix, and version (e.g.,
    gcc-mp-4.7 groups with g++-mp-4.7 and gfortran-mp-4.7).  When two
    groups claim the same version, the one providing more compilers
    wins.
    """
    # Per-language searches, run in parallel.
    dicts = parmap(
        lambda args: cls._find_matches_in_path(*args),
        [
            (cls.cc_names, cls.cc_version) + tuple(path),
            (cls.cxx_names, cls.cxx_version) + tuple(path),
            (cls.f77_names, cls.f77_version) + tuple(path),
            (cls.fc_names, cls.fc_version) + tuple(path),
        ],
    )

    all_keys = set()
    for per_lang in dicts:
        all_keys.update(per_lang)

    compilers = {}
    for key in all_keys:
        ver, pre, suf = key

        # Compilers whose version could not be detected are useless.
        if ver == "unknown":
            continue

        found_paths = tuple(per_lang.get(key) for per_lang in dicts)
        spec = spack.spec.CompilerSpec(cls.name, ver)

        if ver in compilers:
            # Keep whichever candidate fills in more of cc/cxx/f77/fc.
            prev = compilers[ver]
            prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
            new_count = sum(1 for p in found_paths if p is not None)
            prev_count = sum(1 for p in prev_paths if p is not None)
            if new_count <= prev_count:
                continue

        compilers[ver] = cls(spec, *found_paths)

    return list(compilers.values())
def _find_matches_in_path(cls, compiler_names, detect_version, *path):
    """Find candidate compiler executables in the supplied paths.

    Tries every combination of ``compiler_names`` with the class's
    ``prefixes`` and ``suffixes``; for each filename that matches,
    ``detect_version`` is used to determine the compiler's version.

    Returns a dict keyed by (version, prefix, suffix) tuples, for
    further grouping by find().
    """
    if not path:
        path = get_path('PATH')

    prefixes = [''] + cls.prefixes
    suffixes = [''] + cls.suffixes

    checks = []
    for directory in path:
        usable = (os.path.isdir(directory) and
                  os.access(directory, os.R_OK | os.X_OK))
        if not usable:
            continue
        for exe in os.listdir(directory):
            full_path = join_path(directory, exe)
            for pre, name, suf in itertools.product(
                    prefixes, compiler_names, suffixes):
                regex = r'^(%s)%s(%s)$' % (pre, re.escape(name), suf)
                match = re.match(regex, exe)
                if match:
                    checks.append(
                        (full_path,) + match.groups() + (detect_version,))

    successful = [t for t in parmap(_get_versioned_tuple, checks)
                  if t is not None]

    # 'successful' is ordered like the input paths; reverse it so that
    # building the dict (where the last insert wins) preserves the
    # intended precedence.
    successful.reverse()
    return dict(((v, p, s), path) for v, p, s, path in successful)
def find_compiler(self, cmp_cls, *path):
    """Try to find the given type of compiler in the user's environment.

    For each set of compilers found, this returns compiler objects
    with the cc, cxx, f77, fc paths and the version filled in.

    This will search for compilers with the names in cc_names,
    cxx_names, etc. and it will group them if they have common
    prefixes, suffixes, and versions.  e.g., gcc-mp-4.7 would
    be grouped with g++-mp-4.7 and gfortran-mp-4.7.
    """
    # Search for cc, cxx, f77, and fc candidates in parallel; each
    # search returns a dict keyed by (version, prefix, suffix).
    dicts = parmap(
        lambda t: cmp_cls._find_matches_in_path(*t),
        [(cmp_cls.cc_names, cmp_cls.cc_version) + tuple(path),
         (cmp_cls.cxx_names, cmp_cls.cxx_version) + tuple(path),
         (cmp_cls.f77_names, cmp_cls.f77_version) + tuple(path),
         (cmp_cls.fc_names, cmp_cls.fc_version) + tuple(path)])

    # Union of all (version, prefix, suffix) keys found for any language.
    all_keys = set()
    for d in dicts:
        all_keys.update(d)

    compilers = {}
    for k in all_keys:
        ver, pre, suf = k

        # Skip compilers with unknown version.
        if ver == 'unknown':
            continue

        # One path per language, None where that language's compiler
        # was not found under this key.
        paths = tuple(pn[k] if k in pn else None for pn in dicts)
        spec = spack.spec.CompilerSpec(cmp_cls.name, ver)

        if ver in compilers:
            prev = compilers[ver]

            # prefer the one with more compilers.
            prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
            newcount = len([p for p in paths if p is not None])
            prevcount = len([p for p in prev_paths if p is not None])

            # Don't add if it's not an improvement over prev compiler.
            if newcount <= prevcount:
                continue

        # NOTE(review): 'paths' is passed as a single tuple here, unlike
        # the cls(spec, *paths) unpacking used by the other find()
        # variants in this file — presumably this cmp_cls constructor
        # takes the four paths as one sequence plus the OS (self) and
        # machine; confirm against cmp_cls.__init__.
        compilers[ver] = cmp_cls(spec, self, py_platform.machine(), paths)

    return list(compilers.values())
def find_compilers(self, *paths):
    """Return a list of compilers found in the supplied paths.

    This invokes the find_compiler() method for each Compiler class,
    and appends the compilers detected to a list.  Falls back to the
    PATH environment variable when no paths are given.
    """
    if not paths:
        paths = get_path('PATH')

    # Make sure path elements exist, and include /bin directories
    # under prefixes.
    filtered_path = []
    for p in paths:
        # Eliminate symlinks and just take the real directories.
        p = os.path.realpath(p)
        if not os.path.isdir(p):
            continue
        filtered_path.append(p)

        # Check for a bin directory, add it if it exists.
        # ('bin_dir', not 'bin', to avoid shadowing the builtin bin()).
        bin_dir = join_path(p, 'bin')
        if os.path.isdir(bin_dir):
            filtered_path.append(os.path.realpath(bin_dir))

    # Once the paths are cleaned up, do a search for each type of
    # compiler.  We can spawn a bunch of parallel searches to reduce
    # the overhead of spelunking all these directories.

    # NOTE: we import spack.compilers here to avoid init order cycles
    import spack.compilers
    types = spack.compilers.all_compiler_types()
    compiler_lists = parmap(
        lambda cmp_cls: self.find_compiler(cmp_cls, *filtered_path),
        types)

    # Ensure all the version calls we made are cached in the parent
    # process, as well.  This speeds up Spack a lot.
    clist = [comp for cl in compiler_lists for comp in cl]
    return clist
def check(key): try: full_path, prefix, suffix = key version = detect_version(full_path) return (version, prefix, suffix, full_path) except ProcessError, e: tty.debug("Couldn't get version for compiler %s" % full_path, e) return None except Exception, e: # Catching "Exception" here is fine because it just # means something went wrong running a candidate executable. tty.debug("Error while executing candidate compiler %s" % full_path, "%s: %s" %(e.__class__.__name__, e)) return None successful = [key for key in parmap(check, checks) if key is not None] return dict(((v, p, s), path) for v, p, s, path in successful) @classmethod def find(cls, *path): """Try to find this type of compiler in the user's environment. For each set of compilers found, this returns compiler objects with the cc, cxx, f77, fc paths and the version filled in. This will search for compilers with the names in cc_names, cxx_names, etc. and it will group them if they have common prefixes, suffixes, and versions. e.g., gcc-mp-4.7 would be grouped with g++-mp-4.7 and gfortran-mp-4.7. """ dicts = parmap(
def _find_matches_in_path(cls, compiler_names, detect_version, *path):
    """Scan the given paths for executables of this compiler type.

    Every combination of ``compiler_names`` with the class's
    ``prefixes`` and ``suffixes`` is tried against each filename; for
    matches, ``detect_version`` determines the compiler version.

    Returns a dict keyed by (version, prefix, suffix) tuples, which
    find() organizes further.
    """
    if not path:
        path = get_path('PATH')

    prefixes = [''] + cls.prefixes
    suffixes = [''] + cls.suffixes

    checks = []
    for directory in path:
        accessible = (os.path.isdir(directory) and
                      os.access(directory, os.R_OK | os.X_OK))
        if not accessible:
            continue
        for exe in os.listdir(directory):
            full_path = join_path(directory, exe)
            combos = itertools.product(prefixes, compiler_names, suffixes)
            for pre, name, suf in combos:
                pattern = r'^(%s)%s(%s)$' % (pre, re.escape(name), suf)
                m = re.match(pattern, exe)
                if m:
                    checks.append((full_path,) + m.groups())

    def check(key):
        # Resolve one candidate to a (version, prefix, suffix, path)
        # tuple, or None if its version cannot be determined.
        try:
            full_path, prefix, suffix = key
            version = detect_version(full_path)
            return (version, prefix, suffix, full_path)
        except ProcessError as e:
            tty.debug(
                "Couldn't get version for compiler %s" % full_path, e)
            return None
        except Exception as e:
            # Catching "Exception" here is fine because it just
            # means something went wrong running a candidate executable.
            tty.debug("Error while executing candidate compiler %s"
                      % full_path,
                      "%s: %s" % (e.__class__.__name__, e))
            return None

    successful = [k for k in parmap(check, checks) if k is not None]

    # 'successful' is ordered like the input paths.  Reverse it so the
    # dict construction (last insert wins) keeps the intended
    # precedence.
    successful.reverse()
    return dict(((v, p, s), path) for v, p, s, path in successful)
full_path, prefix, suffix = key version = detect_version(full_path) return (version, prefix, suffix, full_path) except ProcessError, e: tty.debug("Couldn't get version for compiler %s" % full_path, e) return None except Exception, e: # Catching "Exception" here is fine because it just # means something went wrong running a candidate executable. tty.debug( "Error while executing candidate compiler %s" % full_path, "%s: %s" % (e.__class__.__name__, e)) return None successful = [key for key in parmap(check, checks) if key is not None] # The 'successful' list is ordered like the input paths. # Reverse it here so that the dict creation (last insert wins) # does not spoil the intented precedence. successful.reverse() return dict(((v, p, s), path) for v, p, s, path in successful) @classmethod def find(cls, *path): """Try to find this type of compiler in the user's environment. For each set of compilers found, this returns compiler objects with the cc, cxx, f77, fc paths and the version filled in. This will search for compilers with the names in cc_names, cxx_names, etc. and it will group them if they have common
full_path, prefix, suffix = key version = detect_version(full_path) return (version, prefix, suffix, full_path) except ProcessError, e: tty.debug( "Couldn't get version for compiler %s" % full_path, e) return None except Exception, e: # Catching "Exception" here is fine because it just # means something went wrong running a candidate executable. tty.debug("Error while executing candidate compiler %s" % full_path, "%s: %s" % (e.__class__.__name__, e)) return None successful = [k for k in parmap(check, checks) if k is not None] # The 'successful' list is ordered like the input paths. # Reverse it here so that the dict creation (last insert wins) # does not spoil the intented precedence. successful.reverse() return dict(((v, p, s), path) for v, p, s, path in successful) def _find_full_path(self, path): """Return the actual path for a tool. Some toolchains use forwarding executables (particularly Xcode-based toolchains) which can be manipulated by external environment variables. This method should be used to extract the actual path used for a tool by finding out the end executable the forwarding executables end up running.
def _find_matches_in_path(cls, compiler_names, detect_version, *path):
    """Look for this compiler type's executables in the supplied paths.

    All combinations of ``compiler_names`` with the class's
    ``prefixes`` and ``suffixes`` are matched against directory
    entries; ``detect_version`` is then used to find each match's
    version.

    Returns a dict of matches grouped by (version, prefix, suffix)
    keys, to be organized further by find().
    """
    if not path:
        path = get_path('PATH')

    prefixes = [''] + cls.prefixes
    suffixes = [''] + cls.suffixes

    # Only scan directories that exist and are readable/searchable.
    usable_dirs = [d for d in path
                   if os.path.isdir(d) and
                   os.access(d, os.R_OK | os.X_OK)]

    checks = []
    for directory in usable_dirs:
        entries = os.listdir(directory)
        for exe in entries:
            full_path = join_path(directory, exe)
            for pre, name, suf in itertools.product(prefixes,
                                                    compiler_names,
                                                    suffixes):
                match = re.match(
                    r'^(%s)%s(%s)$' % (pre, re.escape(name), suf), exe)
                if match:
                    checks.append((full_path,) + match.groups())

    def check(candidate):
        # Turn a (path, prefix, suffix) candidate into a versioned
        # tuple, or None when version detection fails.
        try:
            full_path, prefix, suffix = candidate
            version = detect_version(full_path)
            return (version, prefix, suffix, full_path)
        except ProcessError as e:
            tty.debug(
                "Couldn't get version for compiler %s" % full_path, e)
            return None
        except Exception as e:
            # Catching "Exception" here is fine because it just
            # means something went wrong running a candidate executable.
            tty.debug("Error while executing candidate compiler %s"
                      % full_path,
                      "%s: %s" % (e.__class__.__name__, e))
            return None

    successful = [t for t in parmap(check, checks) if t is not None]

    # Preserve the precedence implied by the input path order: the dict
    # build lets the last insert win, so reverse before inserting.
    successful.reverse()
    return dict(((v, p, s), path) for v, p, s, path in successful)
class Compiler(object): """This class encapsulates a Spack "compiler", which includes C, C++, and Fortran compilers. Subclasses should implement support for specific compilers, their possible names, arguments, and how to identify the particular type of compiler.""" # Subclasses use possible names of C compiler cc_names = [] # Subclasses use possible names of C++ compiler cxx_names = [] # Subclasses use possible names of Fortran 77 compiler f77_names = [] # Subclasses use possible names of Fortran 90 compiler fc_names = [] # Optional prefix regexes for searching for this type of compiler. # Prefixes are sometimes used for toolchains, e.g. 'powerpc-bgq-linux-' prefixes = [] # Optional suffix regexes for searching for this type of compiler. # Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y' # version suffix for gcc. suffixes = [r'-.*'] # Names of generic arguments used by this compiler arg_rpath = '-Wl,-rpath,%s' # argument used to get C++11 options cxx11_flag = "-std=c++11" def __init__(self, cspec, cc, cxx, f77, fc): def check(exe): if exe is None: return None _verify_executables(exe) return exe self.cc = check(cc) self.cxx = check(cxx) self.f77 = check(f77) self.fc = check(fc) self.spec = cspec @property def version(self): return self.spec.version # # Compiler classes have methods for querying the version of # specific compiler executables. This is used when discovering compilers. # # Compiler *instances* are just data objects, and can only be # constructed from an actual set of executables. 
#

@classmethod
def default_version(cls, cc):
    """Override just this to override all compiler version functions."""
    return dumpversion(cc)

@classmethod
def cc_version(cls, cc):
    """Version of the C compiler at path ``cc``."""
    return cls.default_version(cc)

@classmethod
def cxx_version(cls, cxx):
    """Version of the C++ compiler at path ``cxx``."""
    return cls.default_version(cxx)

@classmethod
def f77_version(cls, f77):
    """Version of the Fortran 77 compiler at path ``f77``."""
    return cls.default_version(f77)

@classmethod
def fc_version(cls, fc):
    """Version of the Fortran 90 compiler at path ``fc``."""
    return cls.default_version(fc)

@classmethod
def _find_matches_in_path(cls, compiler_names, detect_version, *path):
    """Finds compilers in the paths supplied.

    Looks for all combinations of ``compiler_names`` with the
    ``prefixes`` and ``suffixes`` defined for this compiler class.
    If any compilers match the compiler_names, prefixes, or suffixes,
    uses ``detect_version`` to figure out what version the compiler
    is.

    This returns a dict with compilers grouped by (prefix, suffix,
    version) tuples.  This can be further organized by find().
    """
    if not path:
        path = get_path('PATH')

    prefixes = [''] + cls.prefixes
    suffixes = [''] + cls.suffixes

    checks = []
    for directory in path:
        # Guard against nonexistent or unreadable PATH entries;
        # os.listdir would raise OSError on them otherwise (the other
        # variants of this method in the file carry the same guard).
        if not (os.path.isdir(directory) and
                os.access(directory, os.R_OK | os.X_OK)):
            continue
        files = os.listdir(directory)
        for exe in files:
            full_path = join_path(directory, exe)
            prod = itertools.product(prefixes, compiler_names, suffixes)
            for pre, name, suf in prod:
                regex = r'^(%s)%s(%s)$' % (pre, re.escape(name), suf)
                match = re.match(regex, exe)
                if match:
                    key = (full_path,) + match.groups()
                    checks.append(key)

    def check(key):
        # Resolve one candidate to (version, prefix, suffix, path),
        # or None when its version cannot be determined.
        try:
            full_path, prefix, suffix = key
            version = detect_version(full_path)
            return (version, prefix, suffix, full_path)
        # 'except X as e' replaces the Python-2-only 'except X, e'
        # form; the 'as' spelling is valid on Python 2.6+ and 3.
        except ProcessError as e:
            tty.debug("Couldn't get version for compiler %s" % full_path,
                      e)
            return None

    successful = [key for key in parmap(check, checks)
                  if key is not None]

    # 'successful' is ordered like the input paths.  Reverse it so the
    # dict creation (last insert wins) does not spoil the intended
    # precedence — same fix the later variants of this method apply.
    successful.reverse()
    return dict(((v, p, s), path) for v, p, s, path in successful)
full_path, prefix, suffix = key version = detect_version(full_path) return (version, prefix, suffix, full_path) except ProcessError, e: tty.debug( "Couldn't get version for compiler %s" % full_path, e) return None except Exception, e: # Catching "Exception" here is fine because it just # means something went wrong running a candidate executable. tty.debug("Error while executing candidate compiler %s" % full_path, "%s: %s" % (e.__class__.__name__, e)) return None successful = [k for k in parmap(check, checks) if k is not None] # The 'successful' list is ordered like the input paths. # Reverse it here so that the dict creation (last insert wins) # does not spoil the intented precedence. successful.reverse() return dict(((v, p, s), path) for v, p, s, path in successful) def _find_full_path(self, path): """Return the actual path for a tool. Some toolchains use forwarding executables (particularly Xcode-based toolchains) which can be manipulated by external environment variables. This method should be used to extract the actual path used for a tool by finding out the end executable the forwarding executables end up running.