def admin_check_call(cmd_line, quiet=False, stdout=None, stderr=None, addtnl_env=None):
    """Run a command with administrator/root privileges.

    On Windows, if not already elevated, the command is launched through
    ShellExecuteEx with the 'runas' verb (UAC prompt) and waited on;
    otherwise it is run directly. On other platforms the command is run
    through 'sudo' when not already root.

    Parameters:
        cmd_line   -- command as a string or a sequence of arguments
        quiet      -- non-Windows only: if False, wire the child to this
                      process's stdout/stderr instead of the given streams
        stdout, stderr -- streams passed to check_call (non-elevated paths)
        addtnl_env -- extra environment variables merged over os.environ

    Raises subprocess.CalledProcessError on a non-zero exit status.
    """
    if addtnl_env is None:
        addtnl_env = dict()
    if 'windows' in platform.system().lower():
        # ShellExecuteEx wants a single command string
        if not is_string(cmd_line):
            cmd_line = ' '.join(cmd_line)
        if not as_admin():
            # Elevate via UAC; pywin32 imports are local so non-Windows
            # platforms never need the modules.
            # pylint: disable=F0401,W0612
            from win32com.shell.shell import ShellExecuteEx
            from win32event import WaitForSingleObject, INFINITE
            from win32process import GetExitCodeProcess
            handle = ShellExecuteEx(lpVerb='runas', lpFile=cmd_line)['hProcess']
            WaitForSingleObject(handle, INFINITE)
            status = GetExitCodeProcess(handle)
            if status != 0:
                raise subprocess.CalledProcessError(status, cmd_line)
        else:
            check_call([cmd_line], stdout=stdout, stderr=stderr, env=addtnl_env)
    else:
        # Merge additional variables over a copy of the current environment
        os_environ = os.environ.copy()
        os_environ = dict(list(os_environ.items()) + list(addtnl_env.items()))
        if is_string(cmd_line):
            cmd_line = cmd_line.split()
        sudo_prefix = []
        if not as_admin():
            sudo_prefix = ['sudo']
        if quiet:
            check_call(sudo_prefix + cmd_line,
                       stdout=stdout, stderr=stderr, env=os_environ)
        else:
            # Verbose mode: show child output on this process's streams
            check_call(sudo_prefix + cmd_line,
                       stdout=sys.stdout, stderr=sys.stderr, env=os_environ)
def compare_versions(actual, requested):
    """Three-way comparison of two version specifiers.

    Each argument may be a float, a version string, an already-split
    tuple, or None. Floats are stringified and strings are split with
    version_str_split before comparing. A requested version of None
    means "latest": it compares equal only to an actual of None and
    older than everything else.

    Returns -1 if actual < requested, 1 if actual > requested, else 0.
    """
    def _normalize(value):
        # float -> string, string -> comparable tuple; pass through otherwise
        if isinstance(value, float):
            value = str(value)
        if is_string(value):
            return version_str_split(value)
        return value

    ver1 = _normalize(actual)
    ver2 = _normalize(requested)
    ## special case: None == latest
    if ver2 is None:
        return 0 if ver1 is None else 1
    if ver1 < ver2:
        return -1
    return 1 if ver1 > ver2 else 0
def __init__(self, module_or_function, callback):
    """
    Can take either a module name or a function:
    Module given must define an onMessage(event) function that uses
    the passed event.data and returns data via a postMessage call.
    Function given must be global and implement onMessage as above.
    """
    self.module = self.function = None
    try:
        is_mod = is_string(module_or_function)
    except NameError:
        # is_string helper may not exist in the pyjs runtime; fall back
        # to a plain isinstance check.
        is_mod = isinstance(module_or_function, str)
    if is_mod:
        self.module = module_or_function
    else:
        self.function = module_or_function
    self.callback = callback
    self._wk = None  # underlying browser Worker object, if supported
    if _worker_supported():
        if self.module:
            # Module mode: the worker script is '<module>.js'
            JS("""@{{self}}._wk = new $wnd.Worker(@{{self.module}} + '.js');""")
        elif self.function:
            # Function mode: a generic helper script dispatches to the
            # named global function, sent as the first message.
            JS("""@{{self}}._wk = new $wnd.Worker('webworker_helper.js');""")
            self._wk.postMessage(self.function.__name__)
        self._wk.onmessage = self.onMessage
        self._wk.onerror = self.onError
def requirement_versioning(name):
    """Split a requirement specifier into (name, version, strict).

    Accepts either a sequence -- (name, version[, strict]) -- or a string
    such as "pkg (>=1.0)" or "pkg==2.1". 'strict' is True when an exact
    ('==') match is requested. A name of 'None' maps to None.
    """
    version = None
    strict = False
    if not is_string(name) and len(name) > 1:
        # Sequence form: unpack positionally
        if len(name) > 2:
            strict = name[2]
        version, name = name[1], name[0]
    elif '=' in name:
        # String form: name ends at the first '(', '>' or '='
        cut = name.find('(')
        if cut < 0:
            cut = name.find('>')
        if cut < 0:
            cut = name.find('=')
        # version runs from after the last '=' to the closing ')' (or end)
        start = name.rfind('=') + 1
        stop = name.find(')')
        if stop < 0:
            stop = len(name)
        strict = '==' in name[cut:]
        version = name[start:stop].strip()
        name = name[:cut].strip()
    if name == 'None':
        name = None
    return name, version, strict
def literal_eval(node_or_string):
    """Safely evaluate a literal expression (string or AST node).

    In COMPATIBILITY_MODE this re-implements ast.literal_eval on top of
    old-style AST node classes (Const/Tuple/List/Dict/Name/UnarySub --
    presumably the Python 2 compiler.ast shim; TODO confirm what 'ast'
    is bound to here). Otherwise it defers to the standard library.

    Raises ValueError for anything that is not a plain literal.
    """
    if COMPATIBILITY_MODE:
        # Only these bare names are allowed to evaluate
        _safe_names = {'None': None, 'True': True, 'False': False}
        if is_string(node_or_string):
            node_or_string = ast_parse(node_or_string, mode='eval')
        if isinstance(node_or_string, ast.Expression):
            # Unwrap the expression wrapper to its root node
            node_or_string = node_or_string.node  # pylint: disable=E1103
        def _convert(node):
            # Recursively convert literal nodes to Python values
            if isinstance(node, ast.Const) and \
               (is_string(node.value) or
                    isinstance(node.value, (int, float, long, complex))):
                return node.value
            elif isinstance(node, ast.Tuple):
                return tuple([_convert(x) for x in node.nodes])
            elif isinstance(node, ast.List):
                return list([_convert(x) for x in node.nodes])
            elif isinstance(node, ast.Dict):
                return dict((_convert(k), _convert(v))
                            for k, v in node.items)
            elif isinstance(node, ast.Name):
                if node.name in _safe_names:
                    return _safe_names[node.name]
            elif isinstance(node, ast.UnarySub):
                # Negative numeric literal
                return -_convert(node.expr)
            raise ValueError('malformed string')
        return _convert(node_or_string)
    else:
        return ast.literal_eval(node_or_string)
def in_prerequisites(item, prereqs):
    """Return True if item matches one of the prerequisites.

    Each prerequisite is either a bare name (string) or a sequence
    whose first element is the name.
    """
    for entry in prereqs:
        if is_string(entry):
            if item == entry:
                return True
        elif item == entry[0]:
            return True
    return False
def is_sequence(seq):
    """Return True for sized, non-string objects.

    Anything that supports len() counts as a sequence here, except
    strings, which are explicitly excluded.
    """
    if is_string(seq):
        return False
    try:
        len(seq)
        return True
    except TypeError:
        return False
def download(self, environ, version, strict=False):
    """Fetch every configured target from the project website.

    If a version is given, it is appended (persistently) to
    self.website. String targets are fetched from the site root;
    sequence targets are (subdirectory, filename) pairs.
    Returns an empty string.
    """
    if version is not None:
        self.website += '/' + version + '/'
    for target in self.targets:
        if is_string(target):
            fetch(self.website, target, target)
        else:
            subdir, filename = target[0], target[1]
            fetch(self.website + '/' + subdir, filename, filename)
    return ''
def is_installed(self, environ, version=None, strict=False):
    """Return True when every target file exists under target_dir.

    String targets are checked directly under target_dir; sequence
    targets are (subdirectory, filename) pairs.
    """
    for entry in self.targets:
        if is_string(entry):
            path = os.path.join(self.target_dir, entry)
        else:
            path = os.path.join(self.target_dir, entry[0], entry[1])
        if not os.path.exists(path):
            return False
    return True
def _get_js_tests(self):
    """Collect (package, [html test units]) pairs from self.tests.

    Only string entries ending in '.html' are kept; packages with no
    such entries are omitted.
    """
    jstests = []
    if self.tests:
        for pkg, tests in self.tests:
            html_units = [unit for unit in tests
                          if is_string(unit) and unit.endswith('.html')]
            if html_units:
                jstests.append((pkg, html_units))
    return jstests
def _get_python_tests(self):
    """Collect (package, [string test units]) pairs from self.tests.

    Non-string entries are dropped; packages with no string entries
    are omitted.
    """
    pytests = []
    if self.tests:
        for pkg, tests in self.tests:
            str_units = [unit for unit in tests if is_string(unit)]
            if str_units:
                pytests.append((pkg, str_units))
    return pytests
def convert_ulist(str_list):
    """Force string entries of a list down to plain ASCII strings.

    Returns None for None input; non-string entries pass through
    unchanged.
    """
    ## distutils *might* not be able to handle unicode, convert it
    if str_list is None:
        return None
    converted = []
    for s in str_list:
        if is_string(s):
            # NOTE(review): s.decode('ascii') assumes a Python 2 byte
            # string; under Python 3, str has no .decode and this line
            # would raise AttributeError -- TODO confirm target version.
            converted.append(''.join(chr(ord(c)) for c in s.decode('ascii')))
        else:
            converted.append(s)
    return converted
def install(self, environ, version, strict=False, locally=True):
    """Download the targets and copy them into self.target_dir.

    String targets land directly in target_dir; sequence targets are
    (subdirectory, filename) pairs and land in the named subdirectory,
    which is created on demand.
    """
    self.download(environ, version, strict)
    if not os.path.exists(self.target_dir):
        os.makedirs(self.target_dir)
    for entry in self.targets:
        if is_string(entry):
            shutil.copy(os.path.join(options.download_dir, entry),
                        self.target_dir)
            continue
        dest_dir = os.path.join(self.target_dir, entry[0])
        filename = entry[1]
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)
        shutil.copy(os.path.join(options.download_dir, filename), dest_dir)
def _dict_append(d, **kws): for k,v in list(kws.items()): if k not in d: d[k] = v continue dv = d[k] if isinstance(dv, tuple): d[k] = dv + tuple(v) elif isinstance(dv, list): d[k] = dv + list(v) elif isinstance(dv, dict): _dict_append(dv, **v) # pylint: disable=W0142 elif is_string(dv): d[k] = dv + v else: raise TypeError(repr(type(dv)))
def mingw_check_call(environ, cmd_line, stdin=None, stdout=None, stderr=None, addtnl_env=None):
    """Run a command inside the MSYS bash shell with MinGW on PATH.

    Parameters:
        environ    -- mapping providing 'MSYS_DIR' and 'MINGW_DIR'
        cmd_line   -- command as a string or a sequence of arguments
        stdout, stderr -- streams handed to subprocess.Popen
        addtnl_env -- extra environment variables merged over os.environ

    Raises subprocess.CalledProcessError on a non-zero exit status.
    """
    if addtnl_env is None:
        addtnl_env = dict()
    # Prepend the MSYS and MinGW bin directories to PATH
    path = os.path.join(environ['MSYS_DIR'], 'bin') + ';' + \
        os.path.join(environ['MINGW_DIR'], 'bin') + ';'
    os_environ = os.environ.copy()
    old_path = os_environ.get('PATH', '')
    # NOTE(review): .encode returns bytes on Python 3, so bytes + str
    # would raise here; this looks like Python 2 era code -- TODO confirm.
    os_environ['PATH'] = path.encode('ascii', 'ignore') + os.pathsep + old_path
    os_environ = dict(list(os_environ.items()) + list(addtnl_env.items()))
    shell = os.path.join(environ['MSYS_DIR'], 'bin', 'bash.exe')
    if not is_string(cmd_line):
        cmd_line = ' '.join(cmd_line)
    # Run the whole command line through bash -c
    p = subprocess.Popen(shell + ' -c "' + cmd_line + '"',
                         env=os_environ, stdout=stdout, stderr=stderr)
    status = p.wait()
    if status != 0:
        raise subprocess.CalledProcessError(status, cmd_line)
def _convert(node):
    """Recursively convert a literal AST node to its Python value.

    Handles Const (string/numeric), Tuple, List, Dict, whitelisted
    Names, and unary minus; anything else raises ValueError.
    NOTE(review): relies on a surrounding-scope _safe_names mapping and
    old-style AST node classes (Const/UnarySub -- presumably the
    Python 2 compiler.ast shim); appears to be the closure used by
    literal_eval above -- TODO confirm.
    """
    if isinstance(node, ast.Const) and \
       (is_string(node.value) or
            isinstance(node.value, (int, float, long, complex))):
        return node.value
    elif isinstance(node, ast.Tuple):
        return tuple([_convert(x) for x in node.nodes])
    elif isinstance(node, ast.List):
        return list([_convert(x) for x in node.nodes])
    elif isinstance(node, ast.Dict):
        return dict((_convert(k), _convert(v)) for k, v in node.items)
    elif isinstance(node, ast.Name):
        # Only None/True/False style names are permitted
        if node.name in _safe_names:
            return _safe_names[node.name]
    elif isinstance(node, ast.UnarySub):
        # Negative numeric literal
        return -_convert(node.expr)
    raise ValueError('malformed string')
def finalize_options(self): """ Perhaps not necessary? (potential OSX problem) from sysdevel.distutils.prerequisites import gcc_is_64bit if ((self.f77exec is None and self.f90exec is None) or \ 'gfortran' in self.f77exec or 'gfortran' in self.f90exec) and \ 'darwin' in platform.system().lower(): ## Unify GCC and GFortran default outputs if gcc_is_64bit(): os.environ['FFLAGS'] = '-arch x86_64' os.environ['FCFLAGS'] = '-arch x86_64' else: os.environ['FFLAGS'] = '-arch i686' os.environ['FCFLAGS'] = '-arch i686' """ # the rest is *nearly* identical to that in the numpy original log.info('unifing config_fc, config, build_clib, build_shlib, ' + 'build_ext, build commands --fcompiler options') build_clib = self.get_finalized_command('build_clib') build_shlib = self.get_finalized_command('build_shlib') build_ext = self.get_finalized_command('build_ext') config = self.get_finalized_command('config') build = self.get_finalized_command('build') cmd_list = [self, config, build_clib, build_shlib, build_ext, build] for a in ['fcompiler']: l = [] for c in cmd_list: v = getattr(c,a) if v is not None: if not is_string(v): v = v.compiler_type if v not in l: l.append(v) if not l: v1 = None else: v1 = l[0] if len(l)>1: log.warn(' commands have different --%s options: %s'\ ', using first in list as default' % (a, l)) if v1: for c in cmd_list: if getattr(c,a) is None: setattr(c, a, v1)
def all_strings(lst):
    """Return True if all items in lst are string objects."""
    return all(is_string(item) for item in lst)
def __run_helper__(
    short_name,
    helper,
    version,
    strict,
    environment,
    skip,
    install,
    quiet,
    out=sys.stdout,
    err=sys.stderr,
    locally=True,
    download=False,
):
    """Configure a single prerequisite package (recursively).

    Loads the helper's configuration, configures its dependencies
    first via find_package_config, then checks/installs this package
    and merges its environment additions into the cached environment.

    Side effects: appends to the module-level 'configured' list,
    writes progress to out/err, and saves the environment cache.
    Returns the merged environment dict.
    """
    configured.append(short_name)
    try:
        cfg = helper.configuration()
    except Exception:
        ver_info = ""
        if version:
            ver_info = " v." + str(version)
        err.write("Error loading " + short_name + ver_info + " configuration.\n")
        raise
    # Configure dependencies first (depth-first), skipping any already done
    for dep in cfg.dependencies:
        dep_name = dep
        if not is_string(dep):
            dep_name = dep[0]
        if dep_name in configured:
            continue
        environment = find_package_config(
            dep, __run_helper__, environment, skip, install, quiet,
            out, err, locally, download
        )
    # Persist progress and re-read so we see dependency additions
    if not environment is None:
        save_cache(environment)
    environment = read_cache()
    if not quiet:
        msg = "Checking for " + short_name
        if version:
            msg += " v." + version
        if strict:
            msg += " (strict)"
        # Pad to a fixed column for aligned status output
        msg += " " * (40 - len(msg))
        out.write(msg)
        out.flush()
    if download:
        cfg.download(environment, version, strict)
    if skip:
        cfg.null()
    elif not cfg.is_installed(environment, version, strict):
        if install:
            if not quiet:
                out.write("Installing...\n")
            cfg.install(environment, version, strict, locally)
        elif not quiet:
            out.write("not found.\n")
    elif cfg.force:
        # Found, but the config demands a reinstall
        if install:
            if not quiet:
                out.write("Forcing install...\n")
            cfg.install(environment, version, strict, locally)
        elif not quiet:
            out.write("found.\n")
    elif not quiet:
        out.write("found.\n")
    # Merge this package's environment over the cached one and record it
    # in the PREREQUISITES list (deduplicated).
    env = dict(list(cfg.environment.items()) + list(environment.items()))
    if not "PREREQUISITES" in env:
        env["PREREQUISITES"] = [short_name]
    else:
        tmp_env = env["PREREQUISITES"] + [short_name]
        env["PREREQUISITES"] = list(set(tmp_env))
    save_cache(env)  ## intermediate cache
    return env
def setup(**attr):
    """Drop-in replacement for distutils/numpy setup().

    Installs the package's custom command classes, runs an optional
    'configuration' callable (numpy-style) after a command-line-only
    pre-pass, moves extension source libraries into the top-level
    'libraries' list, and dispatches to old_setup with the custom
    Distribution class.
    """
    # pylint: disable=W0212
    # pylint: disable=W0142
    cmdclass = my_cmdclass.copy()
    new_attr = attr.copy()
    if 'cmdclass' in new_attr:
        cmdclass.update(new_attr['cmdclass'])
    new_attr['cmdclass'] = cmdclass
    if 'configuration' in new_attr:
        # To avoid calling configuration if there are any errors
        # or help request in command in the line.
        configuration = new_attr.pop('configuration')
        # Recurse once with _setup_stop_after set so distutils only
        # parses the command line, then restore the patched globals.
        old_dist = distutils.core._setup_distribution
        old_stop = distutils.core._setup_stop_after
        distutils.core._setup_distribution = None
        distutils.core._setup_stop_after = "commandline"
        try:
            dist = setup(**new_attr)
        finally:
            distutils.core._setup_distribution = old_dist
            distutils.core._setup_stop_after = old_stop
        if dist.help or not _command_line_ok():
            # probably displayed help, skip running any commands
            return dist
        # create setup dictionary and append to new_attr
        cfg = configuration()
        if hasattr(cfg, 'todict'):
            cfg = cfg.todict()
        _dict_append(new_attr, **cfg)  # pylint: disable=W0142
    # Move extension source libraries to libraries
    libraries = []
    for ext in new_attr.get('ext_modules', []):
        new_libraries = []
        for item in ext.libraries:
            #[item] = convert_ulist([item])
            if is_sequence(item):
                lib_name, _ = item
                _check_append_ext_library(libraries, item)
                new_libraries.append(lib_name)
            elif is_string(item):
                new_libraries.append(item)
            else:
                raise TypeError("invalid description of extension module "
                                "library %r" % (item,))
        ext.libraries = new_libraries
    if libraries:
        if 'libraries' not in new_attr:
            new_attr['libraries'] = []
        for item in libraries:
            _check_append_library(new_attr['libraries'], item)
    # sources in ext_modules or libraries may contain header files
    if ('ext_modules' in new_attr or 'libraries' in new_attr) \
       and 'headers' not in new_attr:
        new_attr['headers'] = []
    # Use our custom Distribution class instead of distutils' one
    new_attr['distclass'] = CustomDistribution
    if not USING_SETUPTOOLS and setuptools_in_use():
        raise Exception("Spurious import of setuptools. Failure in build.")
    return old_setup(**new_attr)
def build_target(builder, target, name, mode):
    """ Common function for build_* commands

    Compiles and links one target (executable or shared library) using
    the distutils/numpy.distutils compiler machinery. 'builder' is the
    running build command, 'target' the target description mapping
    (read via _get/_put), 'name' the base target name, and 'mode' one
    of EXECUTABLE or SHARED_LIBRARY.
    """
    target_name = name
    if mode == SHARED_LIBRARY:
        target_name = builder.compiler.library_filename(name,
                                                        lib_type='shared',
                                                        output_dir='')
    #libraries = convert_ulist(_get(target, 'libraries', []))  ## unused
    library_dirs = convert_ulist(_get(target, 'library_dirs', []))
    runtime_library_dirs = convert_ulist(_get(target,
                                              'runtime_library_dirs', []))
    extra_preargs = _get(target, 'extra_compile_args', [])
    extra_postargs = _get(target, 'extra_link_args', [])
    ## include libraries built by build_shlib and/or build_clib
    library_dirs.append(builder.build_temp)
    ## Conditional recompile: rebuild when forced, missing, or any
    ## source is newer than the existing output.
    build_directory = builder.build_clib
    target_path = os.path.join(build_directory, name)
    recompile = False
    if not os.path.exists(target_path) or builder.force:
        recompile = True
    else:
        for src in _get(target, 'sources', []):
            if os.path.getmtime(target_path) < os.path.getmtime(src):
                recompile = True
                break
    if not recompile:
        return
    library_dirs += [builder.build_clib]
    ########################################
    ## Copied from numpy.distutils.command.build_clib
    # default compilers
    compiler = builder.compiler
    fcompiler = getattr(builder, '_f_compiler', builder.fcompiler)
    sources = _get(target, 'sources')
    if sources is None or not is_sequence(sources):
        raise DistutilsSetupError(("in 'libraries' option (library '%s'), " +
                                   "'sources' must be present and must be " +
                                   "a list of source filenames") % name)
    sources = list(sources)
    # Partition sources by language
    c_sources, cxx_sources, f_sources, fmodule_sources = \
        filter_sources(sources)
    # F90 is required if there are module sources or it is declared
    requiref90 = not not fmodule_sources or \
        (_get(target, 'language', 'c') == 'f90')
    # save source type information so that build_ext can use it.
    source_languages = []
    if c_sources:
        source_languages.append('c')
    if cxx_sources:
        source_languages.append('c++')
    if requiref90:
        source_languages.append('f90')
    elif f_sources:
        source_languages.append('f77')
    _put(target, 'source_languages', source_languages)
    lib_file = compiler.library_filename(name, output_dir=build_directory)
    if mode == SHARED_LIBRARY:
        lib_file = compiler.library_filename(name, lib_type='shared',
                                             output_dir=build_directory)
    depends = sources + (_get(target, 'depends', []))
    if not (builder.force or newer_group(depends, lib_file, 'newer')):
        log.debug("skipping '%s' library (up-to-date)", name)
        return
    else:
        log.info("building '%s' library", name)
    if have_numpy:
        # Rebuild the Fortran compiler with any target-specific config_fc
        config_fc = _get(target, 'config_fc', {})
        if fcompiler is not None and config_fc:
            log.info('using additional config_fc from setup script '
                     'for fortran compiler: %s' % (config_fc,))
            from numpy.distutils.fcompiler import new_fcompiler
            fcompiler = new_fcompiler(compiler=fcompiler.compiler_type,
                                      verbose=builder.verbose,
                                      dry_run=builder.dry_run,
                                      force=builder.force,
                                      requiref90=requiref90,
                                      c_compiler=builder.compiler)
            if fcompiler is not None:
                dist = builder.distribution
                base_config_fc = dist.get_option_dict('config_fc').copy()
                base_config_fc.update(config_fc)
                fcompiler.customize(base_config_fc)
        # check availability of Fortran compilers
        if (f_sources or fmodule_sources) and fcompiler is None:
            ver = '77'
            if requiref90:
                ver = '90'
            raise DistutilsError("target %s has Fortran%s sources"
                                 " but no Fortran compiler found" % (name, ver))
    macros = _get(target, 'define_macros')
    include_dirs = convert_ulist(_get(target, 'include_dirs'))
    if include_dirs is None:
        include_dirs = []
    extra_postargs = _get(target, 'extra_compiler_args') or []
    if have_numpy:
        include_dirs.extend(get_numpy_include_dirs())
    # where compiled F90 module files are:
    module_dirs = _get(target, 'module_dirs', [])
    module_build_dir = os.path.dirname(lib_file)
    if requiref90:
        builder.mkpath(module_build_dir)
    if compiler.compiler_type == 'msvc':
        # this hack works around the msvc compiler attributes
        # problem, msvc uses its own convention :(
        c_sources += cxx_sources
        cxx_sources = []
    objects = []
    if c_sources:
        log.info("compiling C sources")
        objects = compiler.compile(c_sources,
                                   output_dir=builder.build_temp,
                                   macros=macros,
                                   include_dirs=include_dirs,
                                   debug=builder.debug,
                                   extra_postargs=extra_postargs)
    if cxx_sources:
        log.info("compiling C++ sources")
        cxx_compiler = compiler.cxx_compiler()
        cxx_objects = cxx_compiler.compile(cxx_sources,
                                           output_dir=builder.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=builder.debug,
                                           extra_postargs=extra_postargs)
        objects.extend(cxx_objects)
    if f_sources or fmodule_sources:
        if not have_numpy:
            raise Exception("Fortran sources, but no NumPy to compile them.")
        extra_postargs = []
        f_objects = []
        if requiref90:
            if fcompiler.module_dir_switch is None:
                # Remember pre-existing .mod files so only new ones move
                existing_modules = glob('*.mod')
            extra_postargs += fcompiler.module_options(
                module_dirs, module_build_dir)
        if fmodule_sources:
            log.info("compiling Fortran 90 module sources")
            f_objects += fcompiler.compile(fmodule_sources,
                                           output_dir=builder.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=builder.debug,
                                           extra_postargs=extra_postargs)
        if requiref90 and fcompiler.module_dir_switch is None:
            # move new compiled F90 module files to module_build_dir
            for f in glob('*.mod'):
                if f in existing_modules:
                    continue
                t = os.path.join(module_build_dir, f)
                if os.path.abspath(f) == os.path.abspath(t):
                    continue
                if os.path.isfile(t):
                    os.remove(t)
                try:
                    builder.move_file(f, module_build_dir)
                except DistutilsFileError:
                    log.warn('failed to move %r to %r' %
                             (f, module_build_dir))
        if f_sources:
            log.info("compiling Fortran sources")
            f_objects += fcompiler.compile(f_sources,
                                           output_dir=builder.build_temp,
                                           macros=macros,
                                           include_dirs=include_dirs,
                                           debug=builder.debug,
                                           extra_postargs=extra_postargs)
    else:
        f_objects = []
    objects.extend(f_objects)
    # assume that default linker is suitable for
    # linking Fortran object files
    ########################################
    if _get(target, 'link_with_fcompiler', False):  # if using PROGRAM
        link_compiler = fcompiler
    else:
        link_compiler = compiler
        if cxx_sources:
            link_compiler = cxx_compiler
    extra_postargs = _get(target, 'extra_link_args') or []
    ## May be dependent on other libs we're builing
    shlib_libraries = []
    for libinfo in _get(target, 'libraries', []):
        if is_string(libinfo):
            shlib_libraries.append(convert_ulist([libinfo])[0])
        else:
            shlib_libraries.append(libinfo[0])
    if mode == EXECUTABLE:
        # Some compilers have no explicit exe linker; fall back to the
        # shared-object linker driver.
        if not hasattr(link_compiler, 'linker_exe') or \
           link_compiler.linker_exe is None:
            link_compiler.linker_exe = [link_compiler.linker_so[0]]
        target_desc = link_compiler.EXECUTABLE
    elif mode == SHARED_LIBRARY:
        target_desc = link_compiler.SHARED_LIBRARY
    linker_args = dict(
        target_desc=target_desc,
        objects=objects,
        output_filename=target_name,
        output_dir=build_directory,
        libraries=shlib_libraries,
        library_dirs=library_dirs,
        debug=builder.debug,
        extra_preargs=extra_preargs,
        extra_postargs=extra_postargs,
    )
    if not _get(target, 'link_with_fcompiler', False):
        # Fortran linkers do not accept runtime_library_dirs here
        linker_args['runtime_library_dirs'] = runtime_library_dirs
    link_compiler.link(**linker_args)  # pylint: disable=W0142