def _get_gotool(self, tool_name, env):
    """Locate a Go toolchain binary (e.g. "gofmt").

    Search order:
      1. the "<tool_name>DefaultLocation" preference;
      2. next to the binary named by the "golangDefaultLocation" pref;
      3. `which` on the environment's PATH;
      4. next to a "golang" binary found on PATH.

    Returns the absolute path to the tool, or None if not found.
    """
    path = [d.strip() for d in env.get_envvar("PATH", "").split(os.pathsep)
            if d.strip()]
    # 1. Explicit per-tool preference.
    tool_path = env.get_pref(tool_name + "DefaultLocation", "")
    if tool_path and os.path.exists(tool_path):
        return tool_path
    ext = ".exe" if sys.platform.startswith("win") else ""
    # 2. Sibling of the configured golang binary.
    golang_path = env.get_pref("golangDefaultLocation", "")
    if golang_path:
        # BUG FIX: previously referenced the undefined name `golang_dir`,
        # which raised NameError whenever the golang pref was set.
        tool_path = os.path.join(os.path.dirname(golang_path), tool_name + ext)
        if os.path.exists(tool_path):
            return tool_path
    # 3. The tool itself on PATH.
    try:
        return which.which(tool_name, path=path)
    except which.WhichError:
        pass
    # 4. Sibling of a golang binary found on PATH.
    try:
        golang_path = which.which('golang', path=path)
    except which.WhichError:
        return None
    # BUG FIX: previously called os.path.basename() with two arguments
    # (a TypeError) and appended `ext` outside the join; build the
    # sibling path the same way as case 2 above.
    tool_path = os.path.join(os.path.dirname(golang_path), tool_name + ext)
    if os.path.exists(tool_path):
        return tool_path
    return None
def has_command(cmd):
    """Return True if *cmd* resolves to an executable on PATH."""
    from which import which, WhichError
    try:
        which(cmd)
        return True
    except WhichError:
        # The exception detail was never used; just report absence.
        return False
def eclipse(self, ide, args):
    """Build the tree, generate an IDE project, and open it.

    *ide* selects the backend: 'eclipse' or 'visualstudio'.
    Returns 1 on failure; the *args* parameter is unused (it is
    rebound below before the config.status invocation).
    """
    # NOTE(review): if ide is neither 'eclipse' nor 'visualstudio',
    # `backend` is never assigned and the '--backend=%s' line below
    # raises NameError -- presumably the CLI restricts the choices.
    if ide == 'eclipse':
        backend = 'CppEclipse'
    elif ide == 'visualstudio':
        backend = 'VisualStudio'
    if ide == 'eclipse':
        try:
            which.which('eclipse')
        except which.WhichError:
            print('Eclipse CDT 8.4 or later must be installed in your PATH.')
            print('Download: http://www.eclipse.org/cdt/downloads.php')
            return 1
    # Here we refresh the whole build. 'build export' is sufficient here and is probably more
    # correct but it's also nice having a single target to get a fully built and indexed
    # project (gives a easy target to use before go out to lunch).
    res = self._mach_context.commands.dispatch('build', self._mach_context)
    if res != 0:
        return 1
    # Generate or refresh the IDE backend.
    python = self.virtualenv_manager.python_path
    config_status = os.path.join(self.topobjdir, 'config.status')
    args = [python, config_status, '--backend=%s' % backend]
    res = self._run_command_in_objdir(args=args, pass_thru=True,
                                      ensure_exit_code=False)
    if res != 0:
        return 1
    # Launch the chosen IDE on the generated workspace.
    if ide == 'eclipse':
        eclipse_workspace_dir = self.get_eclipse_workspace_path()
        process = subprocess.check_call(['eclipse', '-data',
                                         eclipse_workspace_dir])
    elif ide == 'visualstudio':
        visual_studio_workspace_dir = self.get_visualstudio_workspace_path()
        process = subprocess.check_call(['explorer.exe',
                                         visual_studio_workspace_dir])
def debug(self, params, remote, background, debugger, debugparams, slowscript):
    """Launch the built application under a debugger.

    If *debugger* is given it is resolved on PATH and used as-is;
    otherwise gdb is preferred with lldb as fallback.  Returns 1 on
    setup failure, else the exit status of the spawned process.
    """
    import which
    use_lldb = False
    use_gdb = False
    if debugger:
        try:
            debugger = which.which(debugger)
        except Exception as e:
            print("You don't have %s in your PATH" % (debugger))
            print(e)
            return 1
    else:
        # No explicit debugger: try gdb first, then lldb.
        try:
            debugger = which.which('gdb')
            use_gdb = True
        except Exception:
            try:
                debugger = which.which('lldb')
                use_lldb = True
            except Exception as e:
                print("You don't have gdb or lldb in your PATH")
                print(e)
                return 1
    args = [debugger]
    # Keep the crash reporter out of the way while debugging.
    extra_env = { 'MOZ_CRASHREPORTER_DISABLE' : '1' }
    if debugparams:
        # Use make-style shell splitting for the extra debugger params.
        import pymake.process
        argv, badchar = pymake.process.clinetoargv(debugparams, os.getcwd())
        if badchar:
            print("The +debugparams you passed require a real shell to parse them.")
            print("(We can't handle the %r character.)" % (badchar,))
            return 1
        args.extend(argv)
    binpath = None
    try:
        binpath = self.get_binary_path('app')
    except Exception as e:
        print("It looks like your program isn't built.",
              "You can run |mach build| to build it.")
        print(e)
        return 1
    # gdb and lldb use different separators before the program argv.
    # NOTE(review): when an explicit *debugger* was passed, neither flag
    # is set, so no separator is emitted even if it was gdb -- confirm.
    if use_gdb:
        args.append('--args')
    elif use_lldb:
        args.append('--')
    args.append(binpath)
    if not remote:
        args.append('-no-remote')
    if not background and sys.platform == 'darwin':
        args.append('-foreground')
    if params:
        args.extend(params)
    if not slowscript:
        extra_env['JS_DISABLE_SLOW_SCRIPT_SIGNALS'] = '1'
    return self.run_process(args=args, append_env=extra_env,
                            ensure_exit_code=False, pass_thru=True)
def command(cmd):
    """Build a callable that runs *cmd* through the shell.

    Resolves *cmd* eagerly (raises if it is not on PATH), then returns
    a function taking an argument string; calling it spawns the command
    via ``subprocess.Popen(shell=True)`` and returns the Popen object.
    """
    which(cmd)

    def runner(argstr, **kwargs):
        full_command = '%s %s' % (cmd, argstr)
        log.debug('Executing shell command: %s' % full_command)
        return subprocess.Popen(full_command, shell=True, **kwargs)

    return runner
def run_test(
    self,
    test_paths,
    b2g_home=None,
    busybox=None,
    device_name=None,
    test_objects=None,
    log=None,
    # ignore parameters from other platforms' options
    **kwargs
):
    """Run remote xpcshell tests on a B2G device/emulator.

    Only the first entry of *test_objects* / *test_paths* is used.
    Exits the process if adb cannot be found on PATH.
    """
    try:
        import which
        which.which("adb")
    except which.WhichError:
        # TODO Find adb automatically if it isn't on the path
        print(ADB_NOT_FOUND % ("mochitest-remote", b2g_home))
        sys.exit(1)
    # Only one test (or test path) is supported per invocation.
    test_path = None
    if test_objects:
        if len(test_objects) > 1:
            print("Warning: Only the first test will be used.")
        test_path = self._wrap_path_argument(test_objects[0]["path"])
    elif test_paths:
        if len(test_paths) > 1:
            print("Warning: Only the first test path will be used.")
        test_path = self._wrap_path_argument(test_paths[0]).relpath()
    import runtestsb2g
    parser = runtestsb2g.B2GOptions()
    options, args = parser.parse_args([])
    options.b2g_path = b2g_home
    options.busybox = busybox or os.environ.get("BUSYBOX")
    options.localLib = self.bin_dir
    options.localBin = self.bin_dir
    options.logdir = self.xpcshell_dir
    options.manifest = os.path.join(self.xpcshell_dir, "xpcshell.ini")
    options.mozInfo = os.path.join(self.topobjdir, "mozinfo.json")
    options.objdir = self.topobjdir
    # NOTE(review): this assigns a 1-tuple (note the trailing comma);
    # sibling code assigns a plain string for symbolsPath -- confirm
    # whether the tuple is intended.
    options.symbolsPath = (os.path.join(self.distdir, "crashreporter-symbols"),)
    options.testingModulesDir = os.path.join(self.tests_dir, "modules")
    options.testsRootDir = self.xpcshell_dir
    options.testPath = test_path
    options.use_device_libs = True
    options.emulator = "arm"
    # NOTE(review): device_name defaults to None, which would crash on
    # .startswith() -- presumably callers always pass it.
    if device_name.startswith("emulator"):
        if "x86" in device_name:
            options.emulator = "x86"
    if not options.busybox:
        options.busybox = self._download_busybox(b2g_home, options.emulator)
    return runtestsb2g.run_remote_xpcshell(parser, options, args, log)
def test_recognize_nodejs_file_with_interpreter(self):
    # If we have a node interpreter on our path, then these will be seen as
    # Node.js files, otherwise they are seen as JavaScript files.
    manifest = [
        (tempfile.mktemp(".js"), """\
require('console');
"""),
        (tempfile.mktemp(".js"), """\
module.exports = {};
"""),
        (tempfile.mktemp(".js"), """\
foo.on('something', function(event) {
console.log(event.name);
});
"""),
    ]
    # The expected language depends on whether a node interpreter is
    # available in this test environment.
    import which
    try:
        which.which("node")
        lang = "Node.js"
    except which.WhichError:
        # Could not find node interpreter.
        import logging
        log = logging.getLogger("test")
        log.warn("No node interpreter was found on the path")
        lang = "JavaScript"
    for name, content in manifest:
        path = join(self.data_dir, name)
        _writefile(path, content)
        koDoc = self._koDocFromPath(path)
        self.assertEqual(koDoc.language, lang,
                         "%r found, expected %r, content %r"
                         % (koDoc.language, lang, content))
def mercurial_setup(self, update_only=False):
    """Ensure Mercurial is optimally configured.

    This command will inspect your Mercurial configuration and guide you
    through an interactive wizard helping you configure Mercurial for
    optimal use on Mozilla projects.

    User choice is respected: no changes are made without explicit
    confirmation from you.

    If "--update-only" is used, the interactive wizard is disabled and
    this command only ensures that remote repositories providing
    Mercurial extensions are up to date.
    """
    import which
    import mozboot.bootstrap as bootstrap

    # On Windows, plain "hg" resolves to a script with a shebang; we need
    # the real win32 executable because we spawn a process from it.
    hg_name = "hg.exe" if sys.platform in ("win32", "msys") else "hg"
    hg = which.which(hg_name)

    if update_only:
        bootstrap.update_vct(hg, self._context.state_dir)
    else:
        bootstrap.configure_mercurial(hg, self._context.state_dir)
def push_to_try(self, msg, verbose):
    """Push the working tree to the try server.

    Uses `hg push-to-try` for Mercurial checkouts, or git-cinnabar for
    git checkouts.  Exits the process with status 1 on failure.
    """
    if not self._use_git:
        try:
            hg_args = ['hg', 'push-to-try', '-m', msg]
            subprocess.check_call(hg_args, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            print('ERROR hg command %s returned %s' % (hg_args, e.returncode))
            print('\nmach failed to push to try. There may be a problem '
                  'with your ssh key, or another issue with your mercurial '
                  'installation.')
            # Check for the presence of the "push-to-try" extension, and
            # provide instructions if it can't be found.
            try:
                subprocess.check_output(['hg', 'showconfig',
                                         'extensions.push-to-try'])
            except subprocess.CalledProcessError:
                print('\nThe "push-to-try" hg extension is required. It '
                      'can be installed to Mercurial 3.3 or above by '
                      'running ./mach mercurial-setup')
            sys.exit(1)
    else:
        try:
            which.which('git-cinnabar')
            self._git_push_to_try(msg)
        except which.WhichError:
            # BUG FIX: the concatenated message previously read
            # "...try withthe autotry command... can by found at..."
            print('ERROR git-cinnabar is required to push from git to try '
                  'with the autotry command.\n\nMore information can be '
                  'found at https://github.com/glandium/git-cinnabar')
            sys.exit(1)
def getNodeOrNpmPath(self, filename):
    """
    Return the nodejs or npm path.
    """
    system = platform.system()
    if system == "Windows":
        # Windows installs may use .cmd/.exe wrappers; probe each form
        # in the likely install locations.
        for ext in (".cmd", ".exe", ""):
            try:
                candidate = which.which(filename + ext,
                                        path=self.getPossibleNodePathsWin())
                if self.is_valid(candidate):
                    return candidate
            except which.WhichError:
                pass
    else:
        try:
            return which.which(filename)
        except which.WhichError:
            pass
    # Nothing found: tell the user what we looked for and where.
    if filename == "node":
        print(NODE_NOT_FOUND_MESSAGE)
    elif filename == "npm":
        print(NPM_NOT_FOUND_MESSAGE)
    if platform.system() == "Windows":
        for candidate_dir in self.getPossibleNodePathsWin():
            print(" - %s" % candidate_dir)
    elif platform.system() == "Darwin":
        print(" - /usr/local/bin/node")
    elif platform.system() == "Linux":
        print(" - /usr/bin/nodejs")
    return None
def office_path(self):
    """Locate an OpenOffice executable on PATH.

    Tries `ooffice` first, then `soffice`.  Raises KeyError if
    neither is found.
    """
    path = os.environ['PATH']
    # NOTE(review): PATH is passed as a raw string and the result is
    # indexed (`office[0]`), so the `which` in scope here appears to be
    # a variant returning a list of matches (empty/None when absent)
    # rather than one that raises -- confirm which implementation is
    # imported in this module.
    office = which('ooffice', path=path)
    if not office:
        office = which('soffice', path=path)
    if not office:
        raise KeyError("Cannot find OpenOffice on path")
    return office[0]
def notify(self, msg):
    """Show a desktop notification with the supplied message

    On Linux and Mac, this will show a desktop notification with the message,
    but on Windows we can only flash the screen.
    """
    # Opt-out knob: suppress all notifications.
    moz_nospam = os.environ.get('MOZ_NOSPAM')
    if moz_nospam:
        return
    try:
        if sys.platform.startswith('darwin'):
            try:
                notifier = which.which('terminal-notifier')
            except which.WhichError:
                raise Exception('Install terminal-notifier to get '
                                'a notification when the build finishes.')
            self.run_process([notifier, '-title', 'Mozilla Build System',
                              '-group', 'mozbuild', '-message', msg],
                             ensure_exit_code=False)
        elif sys.platform.startswith('linux'):
            try:
                notifier = which.which('notify-send')
            except which.WhichError:
                raise Exception('Install notify-send (usually part of '
                                'the libnotify package) to get a notification when '
                                'the build finishes.')
            self.run_process([notifier, '--app-name=Mozilla Build System',
                              'Mozilla Build System', msg],
                             ensure_exit_code=False)
        elif sys.platform.startswith('win'):
            # No toast API used here; flash the console window instead
            # via user32!FlashWindowEx.
            from ctypes import Structure, windll, POINTER, sizeof
            from ctypes.wintypes import DWORD, HANDLE, WINFUNCTYPE, BOOL, UINT

            class FLASHWINDOW(Structure):
                _fields_ = [("cbSize", UINT),
                            ("hwnd", HANDLE),
                            ("dwFlags", DWORD),
                            ("uCount", UINT),
                            ("dwTimeout", DWORD)]

            FlashWindowExProto = WINFUNCTYPE(BOOL, POINTER(FLASHWINDOW))
            FlashWindowEx = FlashWindowExProto(("FlashWindowEx", windll.user32))
            FLASHW_CAPTION = 0x01
            FLASHW_TRAY = 0x02
            FLASHW_TIMERNOFG = 0x0C
            # GetConsoleWindows returns NULL if no console is attached. We
            # can't flash nothing.
            console = windll.kernel32.GetConsoleWindow()
            if not console:
                return
            params = FLASHWINDOW(sizeof(FLASHWINDOW),
                                 console,
                                 FLASHW_CAPTION | FLASHW_TRAY | FLASHW_TIMERNOFG,
                                 3, 0)
            FlashWindowEx(params)
    except Exception as e:
        # NOTE(review): `e.message` is Python-2-only; under Python 3 this
        # handler itself would raise AttributeError -- confirm target
        # interpreter version.
        self.log(logging.WARNING, 'notifier-failed',
                 {'error': e.message},
                 'Notification center failed: {error}')
def test_info(self, **params):
    """Report info/results/durations/bugs for each named test.

    When none of the show_* flags are set, everything is shown.
    """
    import which
    from mozbuild.base import MozbuildObject
    self.branches = params['branches']
    self.start = params['start']
    self.end = params['end']
    self.show_info = params['show_info']
    self.show_results = params['show_results']
    self.show_durations = params['show_durations']
    self.show_bugs = params['show_bugs']
    self.verbose = params['verbose']
    if (not self.show_info and
            not self.show_results and
            not self.show_durations and
            not self.show_bugs):
        # by default, show everything
        self.show_info = True
        self.show_results = True
        self.show_durations = True
        self.show_bugs = True
    here = os.path.abspath(os.path.dirname(__file__))
    build_obj = MozbuildObject.from_environment(cwd=here)
    # Resolve the VCS binary matching the checkout type (hg or git).
    self._hg = None
    if conditions.is_hg(build_obj):
        if self._is_windows():
            self._hg = which.which('hg.exe')
        else:
            self._hg = which.which('hg')
    self._git = None
    if conditions.is_git(build_obj):
        if self._is_windows():
            self._git = which.which('git.exe')
        else:
            self._git = which.which('git')
    for test_name in params['test_names']:
        print("===== %s =====" % test_name)
        self.test_name = test_name
        # Very short names are almost certainly typos; skip them.
        if len(self.test_name) < 6:
            print("'%s' is too short for a test name!" % self.test_name)
            continue
        if self.show_info:
            self.set_test_name()
        if self.show_results:
            self.report_test_results()
        if self.show_durations:
            self.report_test_durations()
        if self.show_bugs:
            self.report_bugs()
def run_b2g_test(self, context, tests=None, suite='mochitest', **kwargs):
    """Runs a b2g mochitest."""
    if kwargs.get('desktop'):
        # Desktop (mulet) runs require a Gaia profile directory.
        kwargs['profile'] = kwargs.get('profile') or os.environ.get('GAIA_PROFILE')
        if not kwargs['profile'] or not os.path.isdir(kwargs['profile']):
            print(GAIA_PROFILE_NOT_FOUND)
            sys.exit(1)
        # Debug profiles are rejected for this suite.
        if os.path.isfile(os.path.join(kwargs['profile'], 'extensions',
                                       '*****@*****.**')):
            print(GAIA_PROFILE_IS_DEBUG.format(kwargs['profile']))
            sys.exit(1)
    elif context.target_out:
        # Device runs require an engineering build (test-container app).
        host_webapps_dir = os.path.join(context.target_out, 'data',
                                        'local', 'webapps')
        if not os.path.isdir(os.path.join(
                host_webapps_dir, 'test-container.gaiamobile.org')):
            print(ENG_BUILD_REQUIRED.format(host_webapps_dir))
            sys.exit(1)
    # TODO without os.chdir, chained imports fail below
    os.chdir(self.mochitest_dir)
    # The imp module can spew warnings if the modules below have
    # already been imported, ignore them.
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        import imp
        path = os.path.join(self.mochitest_dir, 'runtestsb2g.py')
        with open(path, 'r') as fh:
            imp.load_module('mochitest', fh, path,
                            ('.py', 'r', imp.PY_SOURCE))
        import mochitest
    options = Namespace(**kwargs)
    from manifestparser import TestManifest
    if tests:
        # Wrap the resolved test objects in a manifest for the harness.
        manifest = TestManifest()
        manifest.tests.extend(tests)
        options.manifestFile = manifest
    if options.desktop:
        return mochitest.run_desktop_mochitests(options)
    try:
        which.which('adb')
    except which.WhichError:
        # TODO Find adb automatically if it isn't on the path
        print(ADB_NOT_FOUND.format(options.b2gPath))
        return 1
    return mochitest.run_remote_mochitests(options)
def run_b2g_test(self, b2g_home, xre_path, test_file=None, **kwargs):
    """Runs a b2g mochitest.

    test_file is a path to a test file. It can be a relative path from the
    top source directory, an absolute filename, or a directory containing
    test files.
    """
    try:
        which.which('adb')
    except which.WhichError:
        # TODO Find adb automatically if it isn't on the path
        raise Exception(ADB_NOT_FOUND % ('mochitest-remote', b2g_home))
    # Need to call relpath before os.chdir() below.
    test_path = ''
    if test_file:
        test_path = self._wrap_path_argument(test_file).relpath()
    # TODO without os.chdir, chained imports fail below
    os.chdir(self.mochitest_dir)
    # Load the b2g harness module from the mochitest directory.
    import imp
    path = os.path.join(self.mochitest_dir, 'runtestsb2g.py')
    with open(path, 'r') as fh:
        imp.load_module('mochitest', fh, path, ('.py', 'r', imp.PY_SOURCE))
    import mochitest
    from mochitest_options import B2GOptions
    parser = B2GOptions()
    options = parser.parse_args([])[0]
    options.b2gPath = b2g_home
    options.consoleLevel = 'INFO'
    options.logcat_dir = self.mochitest_dir
    options.httpdPath = self.mochitest_dir
    options.xrePath = xre_path
    if test_path:
        test_root_file = mozpack.path.join(self.mochitest_dir, 'tests', test_path)
        if not os.path.exists(test_root_file):
            print('Specified test path does not exist: %s' % test_root_file)
            return 1
        options.testPath = test_path
    else:
        # No explicit test: run the default b2g manifest.
        options.testManifest = 'b2g.json'
    # Copy any remaining keyword arguments onto the options object.
    for k, v in kwargs.iteritems():
        setattr(options, k, v)
    mochitest.run_remote_mochitests(parser, options)
def run_test(self, **kwargs):
    """Run remote xpcshell tests, filling unset options with defaults.

    Exits the process if adb cannot be found on PATH.  Returns the
    harness's exit value.
    """
    try:
        import which
        which.which('adb')
    except which.WhichError:
        # TODO Find adb automatically if it isn't on the path
        print(ADB_NOT_FOUND % ('mochitest-remote', kwargs["b2g_home"]))
        sys.exit(1)
    import runtestsb2g
    log = kwargs.pop("log")
    self.log_manager.enable_unstructured()
    # Default every option the caller left as None.
    if kwargs["xpcshell"] is None:
        kwargs["xpcshell"] = "xpcshell"
    if kwargs["b2g_path"] is None:
        kwargs["b2g_path"] = kwargs["b2g_home"]
    # busybox: explicit arg, then $BUSYBOX, then a fresh download.
    if kwargs["busybox"] is None:
        kwargs["busybox"] = os.environ.get('BUSYBOX')
    if kwargs["busybox"] is None:
        kwargs["busybox"] = self._download_busybox(kwargs["b2g_home"],
                                                   kwargs["emulator"])
    if kwargs["localLib"] is None:
        kwargs["localLib"] = self.bin_dir
    if kwargs["localBin"] is None:
        kwargs["localBin"] = self.bin_dir
    if kwargs["logdir"] is None:
        kwargs["logdir"] = self.xpcshell_dir
    if kwargs["manifest"] is None:
        kwargs["manifest"] = os.path.join(self.xpcshell_dir, 'xpcshell.ini')
    if kwargs["mozInfo"] is None:
        kwargs["mozInfo"] = os.path.join(self.topobjdir, 'mozinfo.json')
    if kwargs["objdir"] is None:
        kwargs["objdir"] = self.topobjdir
    if kwargs["symbolsPath"] is None:
        kwargs["symbolsPath"] = os.path.join(self.distdir,
                                             'crashreporter-symbols')
    if kwargs["testingModulesDir"] is None:
        kwargs["testingModulesDir"] = os.path.join(self.tests_dir, 'modules')
    if kwargs["use_device_libs"] is None:
        kwargs["use_device_libs"] = True
    # x86 emulators are identified by their device name.
    if kwargs["device_name"].startswith('emulator') and 'x86' in kwargs["device_name"]:
        kwargs["emulator"] = 'x86'
    parser = parser_b2g()
    options = argparse.Namespace(**kwargs)
    rv = runtestsb2g.run_remote_xpcshell(parser, options, log)
    self.log_manager.disable_unstructured()
    return rv
def _getP4D():
    """Locate a 'p4d' Perforce server binary for testing.

    Prefers a p4d dropped into the current directory, then falls back
    to the PATH.  Raises TestError when none can be found.
    """
    try:
        # A p4d in the current directory wins over one on PATH.
        return which.which("p4d", path=[os.curdir])
    except which.WhichError:
        try:
            return which.which("p4d")
        except which.WhichError:
            # BUG FIX: message previously read "cannot not find".
            raise TestError(
                "cannot find a 'p4d' Perforce server binary "
                "to use for testing. You must download one from "
                "http://www.perforce.com/perforce/loadprog.html "
                "to the current directory or to somewhere on "
                "your PATH"
            )
def get_tool(config, key):
    """Resolve the path of the tool named *key*.

    If *config* maps *key* to an absolute path, that path must exist and
    is returned as-is.  Otherwise the configured name (or *key* itself)
    is looked up on PATH.  Raises ValueError when resolution fails.
    """
    tool = config[key] if key in config else None
    if tool is not None and os.path.isabs(tool):
        if not os.path.exists(tool):
            raise ValueError("%s must point to an existing path" % key)
        return tool
    # Assume that we have the name of some program that should be on PATH.
    try:
        return which.which(tool) if tool else which.which(key)
    except which.WhichError:
        raise ValueError("%s not found on PATH" % tool)
def preload(self, progress_cb=None):
    """Pre-load the stdlibs zone, if able.

    "progress_cb" (optional) is a callable that is called as follows
    to show the progress of the update:
    progress_cb(<desc>, <value>)
    where <desc> is a short string describing the current step and
    <value> is an integer between 0 and 100 indicating the level of
    completeness.

    Use `.can_preload()' to determine if able to pre-load.

    Raises OSError if the unzip command fails.
    """
    import which
    import process
    log.debug("preloading stdlibs zone")
    if progress_cb:
        try:
            progress_cb("Preloading stdlibs...", None)
        except:
            # Progress reporting is best-effort; never let a broken
            # callback abort the preload itself.
            log.exception("error in progress_cb (ignoring)")
    preload_zip = self._get_preload_zip()
    unzip_exe = which.which("unzip")
    # Quote all paths; the command string may be parsed shell-style.
    cmd = '"%s" -q -d "%s" "%s"' % (unzip_exe, dirname(self.base_dir),
                                    preload_zip)
    p = process.ProcessOpen(cmd, stdin=None)
    stdout, stderr = p.communicate()
    retval = p.wait()
    if retval:
        raise OSError("error running '%s'" % cmd)
def is_installed(self):
    """Return True iff SELinux extensions are installed (whether or
    not they are enabled).
    """
    if not sys.platform.startswith("linux"):
        log.debug("SELinux is not installed: this is not Linux")
        return False
    # Determine if SELinux-extensions are installed.
    landmarks = [
        "/usr/sbin/selinuxenabled",
        "/etc/selinux/config",
        "/sbin/selinuxenabled",
    ]
    for landmark in landmarks:
        if os.path.exists(landmark):
            log.debug("SELinux is installed: `%s' exists", landmark)
            return True
    else:
        # NOTE(review): for/else with no `break` in the loop body -- the
        # else clause always runs when no landmark returned early.  It
        # falls back to searching PATH for the selinuxenabled binary.
        import which
        try:
            selinuxenabled_path = which.which("selinuxenabled")
        except which.WhichError:
            pass
        else:
            log.debug("SELinux is installed: `%s' exists",
                      selinuxenabled_path)
            return True
    log.debug("SELinux is not installed: could not find any of its "
              "landmarks")
    # NOTE(review): despite the docstring, the not-installed case falls
    # through returning None (falsy) rather than False.
def install_dependencies(self): install_directory = self.config.directory.bin_directory + os.sep + u"node" #check for node here unzipped_node_path = self.findUnzippedNodePath() if self.config.system.operating_system in ["mac","linux"] and not which("node"): print "Copying node into /usr/local/bin/..." shutil.copy(unzipped_node_path + os.sep + "bin" + os.sep + "node","/usr/local/bin/") os.chmod("/usr/local/bin/node",0777) shutil.copytree(self.findUnzippedNodePath(),install_directory) wants_to_upgrade = True if self.check_if_executable_installed(u"npm"): warning_string = u"A previous version of npm has been found. \nYou may experience problems if you have a version of npm that's too old.Would you like to upgrade?(y/n) " from distutils.util import strtobool print warning_string #for bash script, you have to somehow redirect stdin to raw_input() user_input = raw_input() while True: try: wants_to_upgrade = strtobool(user_input) except: print u"Please enter y or n. " continue break if wants_to_upgrade: import urllib2, urllib print u"Retrieving npm update script..." npm_install_script_path = install_directory + os.sep + u"install.sh" urllib.urlretrieve(u"https://npmjs.org/install.sh",filename=npm_install_script_path) print u"Retrieved npm install script. Executing..." subprocess.call([u"sh", npm_install_script_path]) print u"Updated npm version installed"
def getBundleFromTree(tree):
    """Get the URL of a bundle given a tree name"""
    url = "http://ftp.mozilla.org/pub/mozilla.org/firefox/bundles/%s.hg" % (tree,)
    # Resolve wget first so a missing binary fails before any download.
    wget = which("wget")
    filename = os.path.abspath("%s.hg" % (tree,))
    download_cmd = [wget, "--progress=dot:mega", "-O", filename, url]
    check_call(download_cmd)
    return filename
def interpreter_from_env(self, env):
    """Returns:
    - absolute path to either the preferred or default system interpreter
    - None if none of the above exists
    """
    python = None
    if env.has_pref(self.interpreterPrefName):
        python = env.get_pref(self.interpreterPrefName).strip() or None
    if python is None or not exists(python):
        # Preference missing or stale: look for "python" on the
        # environment's PATH instead.
        import which
        search_dirs = [d.strip()
                       for d in env.get_envvar("PATH", "").split(os.pathsep)
                       if d.strip()]
        try:
            python = which.which("python", path=search_dirs)
        except which.WhichError:
            pass  # deliberately ignored; keep whatever we already had
    return os.path.abspath(python) if python else None
def debug(self, params, remote, background, gdbparams):
    """Launch the built application under gdb.

    Returns 1 on any setup failure, otherwise the exit status of the
    spawned process.
    """
    import which
    try:
        gdb_path = which.which('gdb')
    except Exception as exc:
        print("You don't have gdb in your PATH")
        print(exc)
        return 1
    cmd = [gdb_path]
    if gdbparams:
        # Split the extra gdb parameters with make-style shell parsing.
        import pymake.process
        argv, badchar = pymake.process.clinetoargv(gdbparams, os.getcwd())
        if badchar:
            print("The +gdbparams you passed require a real shell to parse them.")
            print("(We can't handle the %r character.)" % (badchar,))
            return 1
        cmd.extend(argv)
    try:
        cmd.extend(['--args', self.get_binary_path('app')])
    except Exception as exc:
        print("It looks like your program isn't built.",
              "You can run |mach build| to build it.")
        print(exc)
        return 1
    if not remote:
        cmd.append('-no-remote')
    if not background and sys.platform == 'darwin':
        cmd.append('-foreground')
    if params:
        cmd.extend(params)
    return self.run_process(args=cmd, ensure_exit_code=False, pass_thru=True)
def get_valid_csp_solver_config(sugarjar_path, minisat_path=None, tmp_folder=None):
    """Validate and assemble the CSP solver configuration.

    Returns a dict with keys ``minisat_path``, ``sugarjar_path`` and
    ``tmp_folder``.  Raises ConfigurationException when sugar.jar, a
    minisat binary, or the tmp folder cannot be found.
    """
    if not sugarjar_path or not os.path.exists(sugarjar_path):
        raise ConfigurationException("Please pass existing sugar.jar")
    # Restructured from an empty-pass/else to a direct negative test.
    if not (minisat_path and os.path.exists(minisat_path)):
        if minisat_path:
            print(("Passed minisat binary does "
                   "not exist"), minisat_path, "Trying PATH")
        # A ./minisat in the working directory wins over PATH.
        if os.path.exists("minisat"):
            minisat_path = os.path.abspath("minisat")
        else:
            try:
                minisat_path = which.which("minisat")
            except which.WhichError as exc:
                # Chain the original lookup failure for diagnosis.
                raise ConfigurationException(
                    "Please pass an existing minisat2 executable "
                    "or install minisat2 as 'minisat' in PATH"
                ) from exc
    if tmp_folder:
        folder = os.path.abspath(tmp_folder)
    else:
        import tempfile
        folder = tempfile.gettempdir()
    if not os.path.exists(folder):
        # BUG FIX: message previously concatenated to "'%s'does not exist".
        raise ConfigurationException("Please pass existing tmp-folder, '%s' "
                                     "does not exist" % folder)
    return dict(minisat_path=minisat_path,
                sugarjar_path=sugarjar_path,
                tmp_folder=folder)
def lower(self, filename):
    """Run vir-optimizer over *filename* and return the output file path.

    Raises SystemError when the optimizer produced no output file.
    (Python 2 code: print statements.)
    """
    assert self.canCompile(filename)
    backend_path = which('vir-optimizer')
    outputFilename = self.getOutputFilename(filename)
    # Remove any stale output so the existence check below is meaningful.
    safeRemove(outputFilename)
    # NOTE(review): command is built by string concatenation and run with
    # shell=True -- filenames containing spaces or shell metacharacters
    # will be mis-parsed by the shell.
    command = backend_path + " -i " + filename + " -o " + \
        outputFilename + " " + \
        self.interpretOptimizations(self.driver.optimizationLevel)
    if self.driver.verbose:
        command += " -v"
    if self.driver.verbose:
        print 'Running vir-optimizer with: "' + command + '"'
    start = time()
    process = subprocess.Popen(command, shell=True,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    (stdOutData, stdErrData) = process.communicate()
    if self.driver.verbose:
        print ' time: ' + str(time() - start) + ' seconds'
    # The optimizer may exit 0 yet fail; trust the output file instead
    # of the return code.
    if not os.path.isfile(outputFilename):
        raise SystemError('vir-optimizer failed to generate an ' + \
            'output file: \n' \
            + stdOutData + stdErrData)
    return outputFilename
def get_cargo_path(self):
    """Locate cargo: prefer the configured CARGO subst, else PATH."""
    try:
        cargo = self.substs['CARGO']
    except (BuildEnvironmentNotFoundException, KeyError):
        # This tree isn't configured; fall back to whatever is on PATH.
        import which
        cargo = which.which('cargo')
    return cargo
def _get_nodejs_version_from_env(self, env=None):
    """Return the short "major.minor" Node.js version for *env*, or None."""
    import process
    interpreter = env.get_pref("nodejsDefaultInterpreter", None)
    if not interpreter:
        # No preference set: search the environment's PATH for "node".
        import which
        search_dirs = [d.strip()
                       for d in env.get_envvar("PATH", "").split(os.pathsep)
                       if d.strip()]
        try:
            interpreter = which.which("node", path=search_dirs)
        except which.WhichError:
            pass
    if not interpreter:
        return None
    if not os.path.exists(interpreter):
        log.info("Node.js executable %s does not exist", interpreter)
        return None
    proc = process.ProcessOpen([interpreter, "--version"],
                               env=env.get_all_envvars(), stdin=None)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        log.info("Failed to find Node.js version: %r: %s",
                 proc.returncode, stderr)
        return None  # Failed to run
    # "v4.2.1" -> "4.2"
    version = stdout.lstrip("v")
    return ".".join(version.split(".", 2)[:2])
def check_program():
    """Check dependencies.

    Verifies each required external program is on PATH; exits with
    status 1 when any is missing.
    """
    # BUG FIX: the list was a single string 'lastal, kraken, poretools',
    # so the individual programs were never actually checked.
    program_list = ['lastal', 'kraken', 'poretools']
    for program in program_list:
        if not which.which(program):
            # BUG FIX: message previously lacked the space before "is".
            print(program + ' is not available')
            sys.exit(1)
def find_program(file):
    """Resolve *file* to a runnable program path, or None when absent."""
    if is_absolute_or_relative(file):
        # An explicit path: only verify that the file really exists.
        if os.path.isfile(file):
            return os.path.abspath(file)
        return None
    try:
        return which(file)
    except WhichError:
        return None
def check_if_ruby_exists(self):
    """True when a "ruby" executable can be located on the PATH."""
    return bool(which(u"ruby"))
def check_if_gem_exists(self):
    """True when a "gem" executable can be located on the PATH."""
    return bool(which(u"gem"))
def run(self, config_paths):
    """Interactive wizard that inspects and updates the user's Mercurial
    configuration for Mozilla development.

    Returns 0 on success, 1 when setup was aborted or the user declined
    to write required changes.
    """
    # Make sure the directory for cloned extension repos exists.
    try:
        os.makedirs(self.ext_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    try:
        hg = which.which('hg')
    except which.WhichError as e:
        print(e)
        print('Try running |mach bootstrap| to ensure your environment is '
              'up to date.')
        return 1
    # Parse the user's existing hgrc (and includes).
    try:
        c = MercurialConfig(config_paths)
    except ConfigObjError as e:
        print('Error importing existing Mercurial config!\n')
        for error in e.errors:
            print(error.message)
        return 1
    except HgIncludeException as e:
        print(e.message)
        return 1
    print(INITIAL_MESSAGE)
    raw_input()
    hg_version = get_hg_version(hg)
    # Warn about (but allow) legacy Mercurial versions.
    if hg_version < OLDEST_NON_LEGACY_VERSION:
        print(LEGACY_MERCURIAL % hg_version)
        print('')
        if os.name == 'nt':
            print('Please upgrade to the latest MozillaBuild to upgrade '
                  'your Mercurial install.')
            print('')
        else:
            print('Please run |mach bootstrap| to upgrade your Mercurial '
                  'install.')
            print('')
        if not self._prompt_yn('Would you like to continue using an old '
                               'Mercurial version'):
            return 1
    if not c.have_valid_username():
        print(MISSING_USERNAME)
        print('')
        name = self._prompt('What is your name?')
        email = self._prompt('What is your email address?')
        c.set_username(name, email)
        print('Updated your username.')
        print('')
    if not c.have_recommended_diff_settings():
        print(BAD_DIFF_SETTINGS)
        print('')
        if self._prompt_yn('Would you like me to fix this for you'):
            c.ensure_recommended_diff_settings()
            print('Fixed patch settings.')
            print('')
    # Offer extensions that ship with Mercurial itself...
    self.prompt_native_extension(
        c, 'progress',
        'Would you like to see progress bars during Mercurial operations')
    self.prompt_native_extension(
        c, 'color',
        'Would you like Mercurial to colorize output to your terminal')
    self.prompt_native_extension(
        c, 'rebase',
        'Would you like to enable the rebase extension to allow you to move'
        ' changesets around (which can help maintain a linear history)')
    self.prompt_native_extension(
        c, 'histedit',
        'Would you like to enable the histedit extension to allow history '
        'rewriting via the "histedit" command (similar to '
        '`git rebase -i`)')
    self.prompt_native_extension(c, 'mq', MQ_INFO)
    # ...then the externally-maintained Mozilla extensions.
    self.prompt_external_extension(c, 'bzexport', BZEXPORT_INFO)
    if 'reviewboard' not in c.extensions:
        if hg_version < REVIEWBOARD_MINIMUM_VERSION:
            print(REVIEWBOARD_INCOMPATIBLE % REVIEWBOARD_MINIMUM_VERSION)
        else:
            p = os.path.join(self.vcs_tools_dir, 'hgext', 'reviewboard',
                             'client.py')
            self.prompt_external_extension(
                c, 'reviewboard',
                'Would you like to enable the reviewboard extension so '
                'you can easily initiate code reviews against Mozilla '
                'projects',
                path=p)
    if hg_version >= BZPOST_MINIMUM_VERSION:
        self.prompt_external_extension(c, 'bzpost', BZPOST_INFO)
    if hg_version >= FIREFOXTREE_MINIMUM_VERSION:
        self.prompt_external_extension(c, 'firefoxtree', FIREFOXTREE_INFO)
    # mqext only makes sense when mq is enabled.
    if 'mq' in c.extensions:
        self.prompt_external_extension(c, 'mqext', MQEXT_INFO,
                                       os.path.join(self.ext_dir, 'mqext'))
    if 'mqext' in c.extensions:
        self.update_mercurial_repo(
            hg,
            'https://bitbucket.org/sfink/mqext',
            os.path.join(self.ext_dir, 'mqext'),
            'default',
            'Ensuring mqext extension is up to date...')
    if 'mqext' in c.extensions and not c.have_mqext_autocommit_mq():
        if self._prompt_yn(
                'Would you like to configure mqext to '
                'automatically commit changes as you modify patches'):
            c.ensure_mqext_autocommit_mq()
            print('Configured mqext to auto-commit.\n')
    self.prompt_external_extension(c, 'qimportbz', QIMPORTBZ_INFO)
    if not c.have_qnew_currentuser_default():
        print(QNEWCURRENTUSER_INFO)
        if self._prompt_yn(
                'Would you like qnew to set patch author by '
                'default'):
            c.ensure_qnew_currentuser_default()
            print('Configured qnew to set patch author by default.')
            print('')
    # Bugzilla credentials are needed by reviewboard/bzpost.
    if 'reviewboard' in c.extensions or 'bzpost' in c.extensions:
        bzuser, bzpass = c.get_bugzilla_credentials()
        if not bzuser or not bzpass:
            print(MISSING_BUGZILLA_CREDENTIALS)
        if not bzuser:
            bzuser = self._prompt('What is your Bugzilla email address?',
                                  allow_empty=True)
        if bzuser and not bzpass:
            bzpass = self._prompt('What is your Bugzilla password?',
                                  allow_empty=True)
        if bzuser or bzpass:
            c.set_bugzilla_credentials(bzuser, bzpass)
    if self.update_vcs_tools:
        self.update_mercurial_repo(
            hg,
            'https://hg.mozilla.org/hgcustom/version-control-tools',
            self.vcs_tools_dir,
            'default',
            'Ensuring version-control-tools is up to date...')
    # Look for and clean up old extensions.
    for ext in {'bzexport', 'qimportbz'}:
        path = os.path.join(self.ext_dir, ext)
        if os.path.exists(path):
            if self._prompt_yn('Would you like to remove the old and no '
                               'longer referenced repository at %s' % path):
                print('Cleaning up old repository: %s' % path)
                shutil.rmtree(path)
    c.add_mozilla_host_fingerprints()
    # Serialize the would-be config and diff it against the file on disk
    # so the user can review before anything is written.
    b = StringIO()
    c.write(b)
    new_lines = [line.rstrip() for line in b.getvalue().splitlines()]
    old_lines = []
    config_path = c.config_path
    if os.path.exists(config_path):
        with open(config_path, 'rt') as fh:
            old_lines = [line.rstrip() for line in fh.readlines()]
    diff = list(difflib.unified_diff(old_lines, new_lines,
                                     'hgrc.old', 'hgrc.new'))
    if len(diff):
        print('Your Mercurial config file needs updating. I can do this '
              'for you if you like!')
        if self._prompt_yn('Would you like to see a diff of the changes '
                           'first'):
            for line in diff:
                print(line)
            print('')
        if self._prompt_yn('Would you like me to update your hgrc file'):
            with open(config_path, 'wt') as fh:
                c.write(fh)
            print('Wrote changes to %s.' % config_path)
        else:
            print('hgrc changes not written to file. I would have '
                  'written the following:\n')
            c.write(sys.stdout)
            return 1
    print(FINISHED)
    return 0
def build(self, what=None, disable_extra_make_dependencies=None, jobs=0, verbose=False):
    """Build the tree, or a subset of it, and report progress and warnings.

    what -- optional list of directories/targets to build; when omitted the
        whole tree is built through client.mk.
    disable_extra_make_dependencies -- when truthy, skip the dumbmake
        expansion of extra make dependencies for partial builds.
    jobs -- number of parallel make jobs (0 lets the build system decide).
    verbose -- when True, do not silence make output.

    Returns the exit status of the underlying make invocation(s); 0 means
    success.
    """
    import which
    from mozbuild.controller.building import BuildMonitor
    from mozbuild.util import resolve_target_to_make

    self.log_manager.register_structured_logger(
        logging.getLogger('mozbuild'))

    warnings_path = self._get_state_filename('warnings.json')
    monitor = self._spawn(BuildMonitor)
    monitor.init(warnings_path)
    # Snapshot ccache stats now so the hit-rate delta can be reported after
    # the build finishes.
    ccache_start = monitor.ccache_stats()

    with BuildOutputManager(self.log_manager, monitor) as output:
        monitor.start()

        if what:
            # Partial build of explicitly requested directories/targets.
            top_make = os.path.join(self.topobjdir, 'Makefile')
            if not os.path.exists(top_make):
                print('Your tree has not been configured yet. Please run '
                      '|mach build| with no arguments.')
                return 1

            # Collect target pairs.
            target_pairs = []
            for target in what:
                path_arg = self._wrap_path_argument(target)
                make_dir, make_target = resolve_target_to_make(
                    self.topobjdir, path_arg.relpath())

                if make_dir is None and make_target is None:
                    return 1

                # See bug 886162 - we don't want to "accidentally" build
                # the entire tree (if that's really the intent, it's
                # unlikely they would have specified a directory.)
                if not make_dir and not make_target:
                    print("The specified directory doesn't contain a "
                          "Makefile and the first parent with one is the "
                          "root of the tree. Please specify a directory "
                          "with a Makefile or run |mach build| if you "
                          "want to build the entire tree.")
                    return 1

                target_pairs.append((make_dir, make_target))

            # Possibly add extra make depencies using dumbmake.
            if not disable_extra_make_dependencies:
                from dumbmake.dumbmake import (dependency_map,
                                               add_extra_dependencies)
                depfile = os.path.join(self.topsrcdir, 'build',
                                       'dumbmake-dependencies')
                with open(depfile) as f:
                    dm = dependency_map(f.readlines())
                new_pairs = list(add_extra_dependencies(target_pairs, dm))
                self.log(
                    logging.DEBUG, 'dumbmake', {
                        'target_pairs': target_pairs,
                        'new_pairs': new_pairs
                    },
                    'Added extra dependencies: will build {new_pairs} ' +
                    'instead of {target_pairs}.')
                target_pairs = new_pairs

            # Ensure build backend is up to date. The alternative is to
            # have rules in the invoked Makefile to rebuild the build
            # backend. But that involves make reinvoking itself and there
            # are undesired side-effects of this. See bug 877308 for a
            # comprehensive history lesson.
            self._run_make(directory=self.topobjdir,
                           target='backend.RecursiveMakeBackend',
                           line_handler=output.on_line,
                           log=False,
                           print_directory=False)

            # Build target pairs.
            for make_dir, make_target in target_pairs:
                # We don't display build status messages during partial
                # tree builds because they aren't reliable there. This
                # could potentially be fixed if the build monitor were more
                # intelligent about encountering undefined state.
                status = self._run_make(
                    directory=make_dir,
                    target=make_target,
                    line_handler=output.on_line,
                    log=False,
                    print_directory=False,
                    ensure_exit_code=False,
                    num_jobs=jobs,
                    silent=not verbose,
                    append_env={b'NO_BUILDSTATUS_MESSAGES': b'1'})

                if status != 0:
                    break
        else:
            # Full tree build through client.mk, with resource recording.
            monitor.start_resource_recording()
            status = self._run_make(srcdir=True,
                                    filename='client.mk',
                                    line_handler=output.on_line,
                                    log=False,
                                    print_directory=False,
                                    allow_parallel=False,
                                    ensure_exit_code=False,
                                    num_jobs=jobs,
                                    silent=not verbose)

            make_extra = self.mozconfig['make_extra'] or []
            make_extra = dict(m.split('=', 1) for m in make_extra)

            # For universal builds, we need to run the automation steps in
            # the first architecture from MOZ_BUILD_PROJECTS
            projects = make_extra.get('MOZ_BUILD_PROJECTS')
            if projects:
                subdir = os.path.join(self.topobjdir, projects.split()[0])
            else:
                subdir = self.topobjdir
            moz_automation = os.getenv('MOZ_AUTOMATION') or make_extra.get(
                'export MOZ_AUTOMATION', None)
            if moz_automation and status == 0:
                # Run the in-tree automation steps after a successful build.
                status = self._run_make(target='automation/build',
                                        directory=subdir,
                                        line_handler=output.on_line,
                                        log=False,
                                        print_directory=False,
                                        ensure_exit_code=False,
                                        num_jobs=jobs,
                                        silent=not verbose)

        self.log(logging.WARNING, 'warning_summary',
                 {'count': len(monitor.warnings_database)},
                 '{count} compiler warnings present.')

    monitor.finish(record_usage=status == 0)

    high_finder, finder_percent = monitor.have_high_finder_usage()
    if high_finder:
        print(FINDER_SLOW_MESSAGE % finder_percent)

    ccache_end = monitor.ccache_stats()
    if ccache_start and ccache_end:
        ccache_diff = ccache_end - ccache_start
        if ccache_diff:
            self.log(logging.INFO, 'ccache',
                     {'msg': ccache_diff.hit_rate_message()}, "{msg}")

    # MOZ_NOSPAM suppresses the end-of-build desktop notification.
    moz_nospam = os.environ.get('MOZ_NOSPAM')

    if monitor.elapsed > 300 and not moz_nospam:
        # Display a notification when the build completes.
        # This could probably be uplifted into the mach core or at least
        # into a helper API. It is here as an experimentation to see how it
        # is received.
        try:
            if sys.platform.startswith('darwin'):
                try:
                    notifier = which.which('terminal-notifier')
                except which.WhichError:
                    raise Exception(
                        'Install terminal-notifier to get '
                        'a notification when the build finishes.')
                self.run_process([
                    notifier, '-title', 'Mozilla Build System', '-group',
                    'mozbuild', '-message', 'Build complete'
                ],
                                 ensure_exit_code=False)
            elif sys.platform.startswith('linux'):
                try:
                    import dbus
                except ImportError:
                    raise Exception(
                        'Install the python dbus module to '
                        'get a notification when the build finishes.')
                bus = dbus.SessionBus()
                notify = bus.get_object('org.freedesktop.Notifications',
                                        '/org/freedesktop/Notifications')
                method = notify.get_dbus_method(
                    'Notify', 'org.freedesktop.Notifications')
                method('Mozilla Build System', 0, '', 'Build complete', '',
                       [], [], -1)
            elif sys.platform.startswith('win'):
                # Flash the console window's taskbar entry via user32.
                from ctypes import Structure, windll, POINTER, sizeof
                from ctypes.wintypes import DWORD, HANDLE, WINFUNCTYPE, BOOL, UINT

                class FLASHWINDOW(Structure):
                    # Mirrors the Win32 FLASHWINFO structure layout.
                    _fields_ = [("cbSize", UINT),
                                ("hwnd", HANDLE),
                                ("dwFlags", DWORD),
                                ("uCount", UINT),
                                ("dwTimeout", DWORD)]

                FlashWindowExProto = WINFUNCTYPE(BOOL, POINTER(FLASHWINDOW))
                FlashWindowEx = FlashWindowExProto(
                    ("FlashWindowEx", windll.user32))
                FLASHW_CAPTION = 0x01
                FLASHW_TRAY = 0x02
                FLASHW_TIMERNOFG = 0x0C
                params = FLASHWINDOW(
                    sizeof(FLASHWINDOW),
                    windll.kernel32.GetConsoleWindow(),
                    FLASHW_CAPTION | FLASHW_TRAY | FLASHW_TIMERNOFG, 3, 0)
                FlashWindowEx(params)
        except Exception as e:
            # NOTE(review): e.message is Python 2 only — confirm this code is
            # not expected to run under Python 3.
            self.log(logging.WARNING, 'notifier-failed',
                     {'error': e.message},
                     'Notification center failed: {error}')

    if status:
        return status

    long_build = monitor.elapsed > 600

    if long_build:
        output.on_line(
            'We know it took a while, but your build finally finished successfully!'
        )
    else:
        output.on_line('Your build was successful!')

    if monitor.have_resource_usage:
        excessive, swap_in, swap_out = monitor.have_excessive_swapping()
        # if excessive:
        #     print(EXCESSIVE_SWAP_MESSAGE)

        print('To view resource usage of the build, run |mach '
              'resource-usage|.')

    # Only for full builds because incremental builders likely don't
    # need to be burdened with this.
    if not what:
        try:
            # Fennec doesn't have useful output from just building. We should
            # arguably make the build action useful for Fennec. Another day...
            if self.substs['MOZ_BUILD_APP'] != 'mobile/android':
                print(
                    'To take your build for a test drive, run: |mach run|')
            app = self.substs['MOZ_BUILD_APP']
            if app in ('browser', 'mobile/android'):
                print(
                    'For more information on what to do now, see '
                    'https://developer.mozilla.org/docs/Developer_Guide/So_You_Just_Built_Firefox'
                )
        except Exception:
            # Ignore Exceptions in case we can't find config.status (such
            # as when doing OSX Universal builds)
            pass

    return status
def _install_desktop_shortcut(absInstallDir, suppressShortcut): """Install a desktop shortcut as appropriate. "absInstallDir" is the absolute path to which Komodo was just installed "suppressShortcut" is a boolean indicating if creation of the desktop shortcut is to be suppressed. - If we are install under $HOME then look for and install the desktop shortcut to $HOME/Desktop, if it exists. - Otherwise, attempt to add the .desktop shortcut to shared applications shortcuts dir. (See bug 32351 for details). """ # Put together the shortcut content. content = """\ [Desktop Entry] Encoding=UTF-8 Name=__GNOME_DESKTOP_NAME__ GenericName=__GNOME_DESKTOP_GENERIC_NAME__ Comment=__PRODUCT_TAG_LINE__ Exec=%s/lib/mozilla/komodo %%F Icon=%s/share/icons/komodo48.png Terminal=false Type=Application MimeType=text/plain; Categories=__GNOME_DESKTOP_CATEGORIES__ """ % (absInstallDir, absInstallDir) shortcutName = "__GNOME_DESKTOP_SHORTCUT_NAME__" class ShortcutInstallError(Exception): pass # Write desktop file to a temporary file. tempDir = tempfile.mkdtemp() try: tempPath = join(tempDir, shortcutName) file(tempPath, "w").write(content) if suppressShortcut: raise ShortcutInstallError("shortcut suppressed by user") # Use 'xdg-desktop-menu' and 'xdg-desktop-icon' if it's available. xdg_exe_name = 'xdg-desktop-menu' try: xdg_exe = which.which(xdg_exe_name) except which.WhichError: pass else: try: _run("xdg-desktop-menu install --novendor %s" % (tempPath)) _run("xdg-desktop-icon install --novendor %s" % (tempPath)) log.info("%r created successfully", shortcutName) return except OSError: # Fallback to manual install. pass # Determine if installing under HOME or to shared area. HOME = os.environ.get("HOME", None) if not HOME: raise ShortcutInstallError("no HOME environment variable") elif absInstallDir.startswith(HOME): shortcutDir = join(HOME, "Desktop") else: shortcutDir = "/usr/share/applications" shortcutPath = join(shortcutDir, shortcutName) # Attempt to write the Komodo shortcut. 
# (We DO overwrite an existing such shortcut.) if not exists(shortcutDir): raise ShortcutInstallError("'%s' does not exist" % shortcutDir) else: shutil.copy(tempPath, shortcutPath) # Ensure the desktop shortcut has executable permissions. _ensure_executable(shortcutPath) except (EnvironmentError, ShortcutInstallError), ex: fallbackDir = join(absInstallDir, "share", "desktop") fallbackPath = join(fallbackDir, shortcutName) try: if not exists(fallbackDir): os.makedirs(fallbackDir) shutil.copy(tempPath, fallbackPath) # Ensure the backup desktop shortcut has executable permissions. _ensure_executable(fallbackPath) except EnvironmentError, ex2: log.warn( "unexpected error creating fallback .desktop file " "'%s': %s", fallbackPath, ex2)
def run(self, config_paths):
    """Interactive wizard that configures Mercurial for Mozilla development.

    config_paths -- the candidate hgrc paths handed to MercurialConfig;
        the effective file is reported by ``c.config_path``.

    Returns 0 on success, 1 when a prerequisite is missing (no hg binary,
    unparsable config) or when the user declines to have the updated
    config written.
    """
    # Make sure the directory that hosts auxiliary extensions exists.
    try:
        os.makedirs(self.ext_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    try:
        hg = which.which('hg')
    except which.WhichError as e:
        print(e)
        print('Try running |mach bootstrap| to ensure your environment is '
              'up to date.')
        return 1

    try:
        c = MercurialConfig(config_paths)
    except ConfigObjError as e:
        print('Error importing existing Mercurial config!\n'
              '%s\n'
              'If using quotes, they must wrap the entire string.' % e)
        return 1

    print(INITIAL_MESSAGE)
    raw_input()

    # Username / email.
    if not c.have_valid_username():
        print(MISSING_USERNAME)
        print('')
        name = self._prompt('What is your name?')
        email = self._prompt('What is your email address?')
        c.set_username(name, email)
        print('Updated your username.')
        print('')

    if not c.have_recommended_diff_settings():
        print(BAD_DIFF_SETTINGS)
        print('')
        if self._prompt_yn('Would you like me to fix this for you'):
            c.ensure_recommended_diff_settings()
            print('Fixed patch settings.')
            print('')

    active = c.extensions

    # Optional quality-of-life extensions.
    if 'progress' not in active:
        if self._prompt_yn('Would you like to see progress bars during '
                           'long-running Mercurial operations'):
            c.activate_extension('progress')
            print('Activated progress extension.')
            print('')

    if 'color' not in active:
        if self._prompt_yn('Would you like Mercurial to colorize output '
                           'to your terminal'):
            c.activate_extension('color')
            print('Activated color extension.')
            print('')

    if 'rebase' not in active:
        if self._prompt_yn(
                'Would you like to enable the rebase extension '
                'to allow you to move changesets around (which can help '
                'maintain a linear history)'):
            c.activate_extension('rebase')
            print('Activated rebase extension.')
            print('')

    # bzexport lives in version-control-tools; activating it means that
    # repository must be (re)fetched below.
    update_vcs_tools = False
    activate_bzexport = False
    if 'bzexport' not in active:
        print(BZEXPORT_INFO)
        if self._prompt_yn('Would you like to activate bzexport'):
            activate_bzexport = True
            update_vcs_tools = True
    else:
        activate_bzexport = True

    if activate_bzexport:
        update_vcs_tools = True
        c.activate_extension('bzexport',
                             os.path.join(self.vcs_tools_dir, 'hgext',
                                          'bzexport'))
        print('Activated bzexport extension.')
        print('')

    if 'mq' not in active:
        if self._prompt_yn('Would you like to activate the mq extension '
                           'to manage patches'):
            c.activate_extension('mq')
            print('Activated mq extension.')
            print('')

    # Re-read the extension list: mq may have just been activated.
    active = c.extensions

    if 'mq' in active:
        update_mqext = 'mqext' in active
        if 'mqext' not in active:
            print(MQEXT_INFO)
            if self._prompt_yn(
                    'Would you like to activate mqext and '
                    'automatically commit changes as you modify patches'):
                update_mqext = True
                c.activate_extension('mqext',
                                     os.path.join(self.ext_dir, 'mqext'))
                c.autocommit_mq(True)
                print('Activated mqext extension.')
                print('')

        if update_mqext:
            self.update_mercurial_repo(
                hg, 'https://bitbucket.org/sfink/mqext',
                os.path.join(self.ext_dir, 'mqext'), 'default',
                'Ensuring mqext extension is up to date...')

        update_qimportbz = 'qimportbz' in active
        if 'qimportbz' not in active:
            print(QIMPORTBZ_INFO)
            if self._prompt_yn('Would you like to activate qimportbz'):
                update_qimportbz = True
                c.activate_extension(
                    'qimportbz', os.path.join(self.ext_dir, 'qimportbz'))
                print('Activated qimportbz extension.')
                print('')

        if update_qimportbz:
            self.update_mercurial_repo(
                hg,
                'https://hg.mozilla.org/users/robarnold_cmu.edu/qimportbz',
                os.path.join(self.ext_dir, 'qimportbz'), 'default',
                'Ensuring qimportbz extension is up to date...')

        if not c.have_qnew_currentuser_default():
            print(QNEWCURRENTUSER_INFO)
            if self._prompt_yn('Would you like qnew to set patch author by '
                               'default'):
                c.ensure_qnew_currentuser_default()
                print('Configured qnew to set patch author by default.')
                print('')

    if update_vcs_tools:
        self.update_mercurial_repo(
            hg, 'https://hg.mozilla.org/hgcustom/version-control-tools',
            self.vcs_tools_dir, 'default',
            'Ensuring version-control-tools is up to date...')

    # Look for and clean up old extensions.
    for ext in {'bzexport', }:
        path = os.path.join(self.ext_dir, ext)
        if os.path.exists(path):
            if self._prompt_yn('Would you like to remove the old and no '
                               'longer referenced repository at %s' % path):
                print('Cleaning up old repository: %s' % path)
                shutil.rmtree(path)

    c.add_mozilla_host_fingerprints()

    # Serialize the new config to a buffer and diff it against what's on
    # disk; only write with the user's consent.
    b = StringIO()
    c.write(b)
    new_lines = [line.rstrip() for line in b.getvalue().splitlines()]
    old_lines = []

    config_path = c.config_path
    if os.path.exists(config_path):
        with open(config_path, 'rt') as fh:
            old_lines = [line.rstrip() for line in fh.readlines()]

    diff = list(
        difflib.unified_diff(old_lines, new_lines, 'hgrc.old', 'hgrc.new'))

    if len(diff):
        print('Your Mercurial config file needs updating. I can do this '
              'for you if you like!')
        if self._prompt_yn('Would you like to see a diff of the changes '
                           'first'):
            for line in diff:
                print(line)
            print('')

        if self._prompt_yn('Would you like me to update your hgrc file'):
            with open(config_path, 'wt') as fh:
                c.write(fh)
            print('Wrote changes to %s.' % config_path)
        else:
            print('hgrc changes not written to file. I would have '
                  'written the following:\n')
            c.write(sys.stdout)
            return 1

    print(FINISHED)
    return 0
def generate(env, **kw): env.Tool('arm-eabi-gcc') flasherFound = False flasher_windows = r"C:\Program Files (x86)\STMicroelectronics\STM32 ST-LINK Utility\ST-LINK Utility\ST-LINK_CLI.exe" if not flasherFound and env.Detect("openocd"): if not 'DEVICE_NAME' in env and not 'OCD_INTERFACE' in env and not 'DEVICE' in env: raise Exception( "No device name specified. Set DEVICE_NAME with valid device name." ) if 'DEVICE_NAME' in env: deviceName = env['DEVICE_NAME'] if 'DEVICE' in env: deviceName = env['DEVICE']['name'] devFmily = getFamilyFromName(deviceName) ocdInterface = ocdInterfaces[devFmily] ocdTarget = ocdTargets[devFmily] env["OCD_INTERFACE"] = ocdInterface env["OCD_TARGET"] = ocdTarget openOcdLocation = which.which('openocd') if openOcdLocation is not None: openOcdPath, fname = os.path.split(openOcdLocation) env['FLASH_TOOL'] = os.path.abspath(openOcdLocation) env['FLASH_TOOL_PATH'] = openOcdPath flashBuilder = Builder(action=openOcdFlashImage, src_suffix=".hex", suffix="_flash.log") runBuilder = Builder(action=openOcdRun, src_suffix=".hex", suffix="_run.log") debugBuilder = Builder(action=openOcdDebug, src_suffix=".elf", suffix="_debug.log") env.Append(BUILDERS={ 'Flash': flashBuilder, 'Run': runBuilder, 'Debug': debugBuilder }) flasherFound = True if not flasherFound: if os.path.isfile(flasher_windows): env['FLASH_TOOL'] = '"%s"' % (flasher_windows) flashBuilder = Builder(action=stLinkFlashImage, src_suffix=".hex", suffix="_flash.log") runBuilder = Builder(action=stLinkRun, src_suffix=".hex", suffix="_run.log") env.Append(BUILDERS={'Flash': flashBuilder, 'Run': runBuilder}) flasherFound = True else: print("ST-LINK Utility is not found in default location") if not flasherFound: print("No supported flashing tool found. ")
def run_b2g_test(self, test_paths=None, b2g_home=None, xre_path=None,
                 total_chunks=None, this_chunk=None, no_window=None,
                 repeat=0, run_until_failure=False, chrome=False, **kwargs):
    """Runs a b2g mochitest.

    test_paths is an enumerable of paths to tests. It can be a relative
    path from the top source directory, an absolute filename, or a
    directory containing test files. Only the first entry is honored.

    Extra **kwargs are copied verbatim onto the mochitest options object.
    Returns the mochitest runner's exit status, or 1 on setup errors.
    """
    # Need to call relpath before os.chdir() below.
    test_path = ''
    if test_paths:
        if len(test_paths) > 1:
            print('Warning: Only the first test path will be used.')
        test_path = self._wrap_path_argument(test_paths[0]).relpath()

    # TODO without os.chdir, chained imports fail below
    os.chdir(self.mochitest_dir)

    # The imp module can spew warnings if the modules below have
    # already been imported, ignore them.
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')

        import imp
        path = os.path.join(self.mochitest_dir, 'runtestsb2g.py')
        with open(path, 'r') as fh:
            imp.load_module('mochitest', fh, path,
                            ('.py', 'r', imp.PY_SOURCE))

        import mochitest
        from mochitest_options import B2GOptions

    parser = B2GOptions()
    options = parser.parse_args([])[0]

    if test_path:
        # Chrome tests live under chrome/, everything else under tests/.
        if chrome:
            test_root_file = mozpath.join(self.mochitest_dir, 'chrome',
                                          test_path)
        else:
            test_root_file = mozpath.join(self.mochitest_dir, 'tests',
                                          test_path)
        if not os.path.exists(test_root_file):
            print('Specified test path does not exist: %s' %
                  test_root_file)
            return 1
        options.testPath = test_path

    # Copy any remaining keyword arguments straight onto the options.
    for k, v in kwargs.iteritems():
        setattr(options, k, v)
    options.noWindow = no_window
    options.totalChunks = total_chunks
    options.thisChunk = this_chunk
    options.repeat = repeat
    options.runUntilFailure = run_until_failure
    options.symbolsPath = os.path.join(self.distdir,
                                       'crashreporter-symbols')
    options.consoleLevel = 'INFO'
    if conditions.is_b2g_desktop(self):
        # Desktop b2g: run against the locally built binary.
        options.desktop = True
        options.app = self.get_binary_path()
        if not options.app.endswith('-bin'):
            options.app = '%s-bin' % options.app
        if not os.path.isfile(options.app):
            options.app = options.app[:-len('-bin')]

        return mochitest.run_desktop_mochitests(parser, options)

    # Device/emulator run: adb must be reachable.
    try:
        which.which('adb')
    except which.WhichError:
        # TODO Find adb automatically if it isn't on the path
        print(ADB_NOT_FOUND % ('mochitest-remote', b2g_home))
        return 1

    options.b2gPath = b2g_home
    options.logdir = self.mochitest_dir
    options.httpdPath = self.mochitest_dir
    options.xrePath = xre_path
    options.chrome = chrome
    return mochitest.run_remote_mochitests(parser, options)
def configure_devices(self):
    """Ensure devices.ini is set up.

    Interactively offers to reuse an existing devices.ini, or enumerates
    connected (rooted) Android devices via adb and writes one [device-N]
    section per verified device. Returns True when at least one device is
    configured (and sets self.config['devices-configured']), else False.
    """
    keep_going = True
    device_ini = os.path.join(self.config['base-dir'], 'devices.ini')
    if os.path.exists(device_ini):
        response = raw_input(
            "Use existing device configuration at %s? (Y/n) " %
            device_ini).strip()
        # Anything without an 'n' counts as a yes.
        if 'n' not in response.lower():
            self.build_obj.log(
                logging.INFO, "autophone", {},
                "Using device configuration at %s" % device_ini)
            return keep_going
    keep_going = False
    self.build_obj.log(
        logging.INFO, "autophone", {},
        "You must configure at least one Android device "
        "before running autophone.")
    response = raw_input("Configure devices now? (Y/n) ").strip()
    if response.lower().startswith('y') or response == '':
        response = raw_input(
            "Connect your rooted Android test device(s) with usb and press Enter "
        )
        # Prefer the build's configured adb; fall back to PATH, then ask.
        adb_path = 'adb'
        try:
            if os.path.exists(self.build_obj.substs["ADB"]):
                adb_path = self.build_obj.substs["ADB"]
        except:
            if self.verbose:
                self.build_obj.log(logging.ERROR, "autophone", {},
                                   str(sys.exc_info()[0]))
            # No build environment?
            try:
                adb_path = which.which('adb')
            except which.WhichError:
                adb_path = raw_input(
                    "adb not found. Enter path to adb: ").strip()
        if self.verbose:
            print("Using adb at %s" % adb_path)
        dm = DeviceManagerADB(autoconnect=False,
                              adbPath=adb_path,
                              retryLimit=1)
        device_index = 1
        try:
            # Write one [device-N] section per verified (rooted) device.
            with open(os.path.join(self.config['base-dir'], 'devices.ini'),
                      'w') as f:
                for device in dm.devices():
                    serial = device[0]
                    if self.verify_device(adb_path, serial):
                        f.write("[device-%d]\nserialno=%s\n" %
                                (device_index, serial))
                        device_index += 1
                        self.build_obj.log(
                            logging.INFO, "autophone", {},
                            "Added '%s' to device configuration." % serial)
                        keep_going = True
                    else:
                        self.build_obj.log(
                            logging.WARNING, "autophone", {},
                            "Device '%s' is not rooted - skipping" % serial)
        except:
            # Deliberate catch-all: any adb/device failure just aborts the
            # device scan with an error message.
            self.build_obj.log(
                logging.ERROR, "autophone", {},
                "Failed to get list of connected Android devices.")
            if self.verbose:
                self.build_obj.log(logging.ERROR, "autophone", {},
                                   str(sys.exc_info()[0]))
            keep_going = False
        if device_index <= 1:
            self.build_obj.log(
                logging.ERROR, "autophone", {},
                "No devices configured! (Can you see your rooted test device(s)"
                " in 'adb devices'?")
            keep_going = False
    if keep_going:
        self.config['devices-configured'] = True
    return keep_going
def check_if_executable_installed(self, name): executable_path = which(name) if executable_path: return True else: return False
def find_in_path(name): try: return which(name) except IOError: return None
def find_in_path(name): path = os.environ.get('PATH', os.defpath).split(os.pathsep) if config.get('bin_path') and config['bin_path'] != 'None': path.append(config['bin_path']) return which(name, path=os.pathsep.join(path))
def run(self, config_path):
    """Interactive wizard that configures Mercurial for Mozilla development.

    config_path -- path of the hgrc file to create or update.

    Returns 0 on success, 1 when a prerequisite is missing (no hg binary)
    or when the user declines to have the updated config written.
    """
    # Make sure the directory that hosts auxiliary extensions exists.
    try:
        os.makedirs(self.ext_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    try:
        hg = which.which('hg')
    # FIX: this previously caught `which.whichError` (wrong capitalization),
    # which raised AttributeError instead of handling a missing hg binary.
    # `which.WhichError` matches the sibling wizard in this file.
    except which.WhichError as e:
        print(e)
        print('Try running |mach bootstrap| to ensure your environment is '
              'up to date.')
        return 1

    c = MercurialConfig(config_path)

    print(INITIAL_MESSAGE)
    raw_input()

    # Username / email.
    if not c.have_valid_username():
        print(MISSING_USERNAME)
        print('')
        name = self._prompt('What is your name?')
        email = self._prompt('What is your email address?')
        c.set_username(name, email)
        print('Updated your username.')
        print('')

    if not c.have_recommended_diff_settings():
        print(BAD_DIFF_SETTINGS)
        print('')
        if self._prompt_yn('Would you like me to fix this for you'):
            c.ensure_recommended_diff_settings()
            print('Fixed patch settings.')
            print('')

    active = c.extensions

    # Optional quality-of-life extensions.
    if 'progress' not in active:
        if self._prompt_yn('Would you like to see progress bars during '
                           'long-running Mercurial operations'):
            c.activate_extension('progress')
            print('Activated progress extension.')
            print('')

    if 'color' not in active:
        if self._prompt_yn('Would you like Mercurial to colorize output '
                           'to your terminal'):
            c.activate_extension('color')
            print('Activated color extension.')
            print('')

    # bzexport: refresh its clone whenever it is (or becomes) active.
    update_bzexport = 'bzexport' in active
    if 'bzexport' not in active:
        print(BZEXPORT_INFO)
        if self._prompt_yn('Would you like to activate bzexport'):
            update_bzexport = True
            c.activate_extension('bzexport',
                                 os.path.join(self.ext_dir, 'bzexport'))
            print('Activated bzexport extension.')
            print('')

    if update_bzexport:
        self.update_mercurial_repo(
            hg,
            'https://hg.mozilla.org/users/tmielczarek_mozilla.com/bzexport',
            os.path.join(self.ext_dir, 'bzexport'), 'default',
            'Ensuring bzexport extension is up to date...')

    if 'mq' not in active:
        if self._prompt_yn('Would you like to activate the mq extension '
                           'to manage patches'):
            c.activate_extension('mq')
            print('Activated mq extension.')
            print('')

    # Re-read the extension list: mq may have just been activated.
    active = c.extensions

    if 'mq' in active:
        update_mqext = 'mqext' in active
        if 'mqext' not in active:
            print(MQEXT_INFO)
            if self._prompt_yn(
                    'Would you like to activate mqext and '
                    'automatically commit changes as you modify patches'):
                update_mqext = True
                c.activate_extension('mqext',
                                     os.path.join(self.ext_dir, 'mqext'))
                c.autocommit_mq(True)
                print('Activated mqext extension.')
                print('')

        if update_mqext:
            self.update_mercurial_repo(
                hg, 'https://bitbucket.org/sfink/mqext',
                os.path.join(self.ext_dir, 'mqext'), 'default',
                'Ensuring mqext extension is up to date...')

        update_qimportbz = 'qimportbz' in active
        if 'qimportbz' not in active:
            print(QIMPORTBZ_INFO)
            if self._prompt_yn('Would you like to activate qimportbz'):
                update_qimportbz = True
                c.activate_extension(
                    'qimportbz', os.path.join(self.ext_dir, 'qimportbz'))
                print('Activated qimportbz extension.')
                print('')

        if update_qimportbz:
            self.update_mercurial_repo(
                hg,
                'https://hg.mozilla.org/users/robarnold_cmu.edu/qimportbz',
                os.path.join(self.ext_dir, 'qimportbz'), 'default',
                'Ensuring qimportbz extension is up to date...')

    c.add_mozilla_host_fingerprints()

    # Serialize the new config to a buffer and diff it against what's on
    # disk; only write with the user's consent.
    b = StringIO()
    c.write(b)
    new_lines = [line.rstrip() for line in b.getvalue().splitlines()]
    old_lines = []

    if os.path.exists(config_path):
        with open(config_path, 'rt') as fh:
            old_lines = [line.rstrip() for line in fh.readlines()]

    diff = list(
        difflib.unified_diff(old_lines, new_lines, 'hgrc.old', 'hgrc.new'))

    if len(diff):
        print('Your Mercurial config file needs updating. I can do this '
              'for you if you like!')
        if self._prompt_yn('Would you like to see a diff of the changes '
                           'first'):
            for line in diff:
                print(line)
            print('')

        if self._prompt_yn('Would you like me to update your hgrc file'):
            with open(config_path, 'wt') as fh:
                c.write(fh)
            print('Wrote changes to %s.' % config_path)
        else:
            print('hgrc changes not written to file. I would have '
                  'written the following:\n')
            c.write(sys.stdout)
            return 1

    print(FINISHED)
    return 0
def eclipse(self, ide, args):
    """Build the tree and open it in the requested IDE.

    ide -- one of 'eclipse', 'visualstudio', 'androidstudio', 'intellij'.
    args -- unused positional arguments from the command dispatcher
        (shadowed below when invoking config.status).

    Returns 1 on any failure; otherwise falls through after launching the
    IDE process.
    """
    if ide == 'eclipse':
        backend = 'CppEclipse'
    elif ide == 'visualstudio':
        backend = 'VisualStudio'
    elif ide == 'androidstudio' or ide == 'intellij':
        # The build backend for Android Studio and IntelliJ is just the regular one.
        backend = 'RecursiveMake'

    if ide == 'eclipse':
        try:
            which.which('eclipse')
        except which.WhichError:
            print(
                'Eclipse CDT 8.4 or later must be installed in your PATH.')
            print('Download: http://www.eclipse.org/cdt/downloads.php')
            return 1
    elif ide == 'androidstudio' or ide == 'intellij':
        studio = ['studio'] if ide == 'androidstudio' else ['idea']
        if sys.platform != 'darwin':
            # FIX: narrowed a bare `except:` (which also swallowed
            # KeyboardInterrupt/SystemExit) to the lookup failure it is
            # meant to handle.
            try:
                which.which(studio[0])
            except which.WhichError:
                self.print_ide_error(ide)
                return 1
        else:
            # In order of preference!
            for d in self.get_mac_ide_preferences(ide):
                if os.path.isdir(d):
                    studio = ['open', '-a', d]
                    break
            else:
                print(
                    'Android Studio or IntelliJ IDEA 14 is not installed in /Applications.'
                )
                return 1

    # Here we refresh the whole build. 'build export' is sufficient here and is probably more
    # correct but it's also nice having a single target to get a fully built and indexed
    # project (gives a easy target to use before go out to lunch).
    res = self._mach_context.commands.dispatch('build', self._mach_context)
    if res != 0:
        return 1

    # FIX: this condition was `ide == 'androidstudio' or 'intellij'`, which
    # is always truthy ('intellij' is a non-empty string), so the
    # eclipse/visualstudio path below never regenerated its IDE backend and
    # wrongly ran the Android package/gradle-install steps.
    if ide in ('androidstudio', 'intellij'):
        res = self._mach_context.commands.dispatch('package',
                                                   self._mach_context)
        if res != 0:
            return 1
        res = self._mach_context.commands.dispatch('gradle-install',
                                                   self._mach_context)
        if res != 0:
            return 1
    else:
        # Generate or refresh the IDE backend.
        python = self.virtualenv_manager.python_path
        config_status = os.path.join(self.topobjdir, 'config.status')
        args = [python, config_status, '--backend=%s' % backend]
        res = self._run_command_in_objdir(args=args,
                                          pass_thru=True,
                                          ensure_exit_code=False)
        if res != 0:
            return 1

    # Launch the IDE on the generated project/workspace.
    if ide == 'eclipse':
        eclipse_workspace_dir = self.get_eclipse_workspace_path()
        subprocess.check_call(['eclipse', '-data', eclipse_workspace_dir])
    elif ide == 'visualstudio':
        visual_studio_workspace_dir = self.get_visualstudio_workspace_path(
        )
        subprocess.check_call(
            ['explorer.exe', visual_studio_workspace_dir])
    elif ide == 'androidstudio' or ide == 'intellij':
        gradle_dir = None
        if self.is_gradle_project_already_imported():
            gradle_dir = self.get_gradle_project_path()
        else:
            gradle_dir = self.get_gradle_import_path()
        subprocess.check_call(studio + [gradle_dir])
def checkIfCurlExecutableExists(self): curlPath = which("curl") if curlPath: return True else: return False
def run_b2g_test(self, b2g_home=None, xre_path=None, test_file=None,
                 suite=None, **kwargs):
    """Runs a b2g reftest.

    test_file is a path to a test file. It can be a relative path from the
    top source directory, an absolute filename, or a directory containing
    test files.

    suite is the type of reftest to run. It can be one of ('reftest',
    'crashtest').

    Extra **kwargs are copied verbatim onto the reftest options object.
    Returns the reftest runner's exit status, or 1 on setup errors;
    raises Exception for an invalid suite, missing manifest, or missing
    adb (remote runs).
    """
    if suite not in ('reftest', 'crashtest'):
        raise Exception('None or unrecognized reftest suite type.')

    # Find the manifest file
    if not test_file:
        if suite == 'reftest':
            test_file = mozpack.path.join('layout', 'reftests')
        elif suite == 'crashtest':
            test_file = mozpack.path.join('testing', 'crashtest')

    if not os.path.exists(os.path.join(self.topsrcdir, test_file)):
        test_file = mozpack.path.relpath(os.path.abspath(test_file),
                                         self.topsrcdir)

    manifest = self._find_manifest(suite, test_file)
    if not os.path.exists(mozpack.path.join(self.topsrcdir, manifest)):
        raise Exception('No manifest file was found at %s.' % manifest)

    # Need to chdir to reftest_dir otherwise imports fail below.
    os.chdir(self.reftest_dir)

    # The imp module can spew warnings if the modules below have
    # already been imported, ignore them.
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')

        import imp
        path = os.path.join(self.reftest_dir, 'runreftestb2g.py')
        with open(path, 'r') as fh:
            imp.load_module('reftest', fh, path,
                            ('.py', 'r', imp.PY_SOURCE))
        import reftest

    # Set up the reftest options.
    parser = reftest.B2GOptions()
    options, args = parser.parse_args([])

    # Tests need to be served from a subdirectory of the server. Symlink
    # topsrcdir here to get around this.
    tests = os.path.join(self.reftest_dir, 'tests')
    if not os.path.isdir(tests):
        os.symlink(self.topsrcdir, tests)
    args.insert(0, os.path.join('tests', manifest))

    # Copy any remaining keyword arguments straight onto the options.
    for k, v in kwargs.iteritems():
        setattr(options, k, v)

    if conditions.is_b2g_desktop(self):
        # Desktop b2g requires Marionette and a Gaia profile.
        if self.substs.get('ENABLE_MARIONETTE') != '1':
            print(MARIONETTE_DISABLED % ('mochitest-b2g-desktop',
                                         self.mozconfig['path']))
            return 1

        options.profile = options.profile or os.environ.get('GAIA_PROFILE')
        if not options.profile:
            print(GAIA_PROFILE_NOT_FOUND % 'reftest-b2g-desktop')
            return 1

        if os.path.isfile(os.path.join(options.profile, 'extensions', \
                '*****@*****.**')):
            print(GAIA_PROFILE_IS_DEBUG % ('mochitest-b2g-desktop',
                                           options.profile))
            return 1

        options.desktop = True
        options.app = self.get_binary_path()
        if options.oop:
            options.browser_arg = '-oop'
        if not options.app.endswith('-bin'):
            options.app = '%s-bin' % options.app
        if not os.path.isfile(options.app):
            options.app = options.app[:-len('-bin')]

        return reftest.run_desktop_reftests(parser, options, args)

    # Device/emulator run: adb must be reachable.
    try:
        which.which('adb')
    except which.WhichError:
        # TODO Find adb automatically if it isn't on the path
        raise Exception(ADB_NOT_FOUND % ('%s-remote' % suite, b2g_home))

    options.b2gPath = b2g_home
    options.logdir = self.reftest_dir
    options.httpdPath = os.path.join(self.topsrcdir, 'netwerk', 'test',
                                     'httpserver')
    options.xrePath = xre_path
    options.ignoreWindowSize = True

    # Don't enable oop for crashtest until they run oop in automation
    if suite == 'reftest':
        options.oop = True

    return reftest.run_remote_reftests(parser, options, args)
def checkIfGitExecutableExists(self): gitPath = which(u"git") if gitPath: return True else: return False
def build(self, what=None, pymake=False, disable_extra_make_dependencies=None, jobs=0, verbose=False):
    """Build the tree, or specific targets within it.

    what -- optional list of target directories/paths to build; when
        falsy, the whole tree is built via client.mk.
    pymake -- force the pymake backend when truthy.
    disable_extra_make_dependencies -- skip the dumbmake dependency
        expansion for partial builds.
    jobs -- parallelism forwarded to make (0 means harness default).
    verbose -- when False, make output is silenced.

    Returns the make exit status (0 on success).
    """
    import which
    from mozbuild.controller.building import BuildMonitor
    from mozbuild.util import resolve_target_to_make

    self.log_manager.register_structured_logger(logging.getLogger('mozbuild'))

    # The monitor records warnings and resource usage for the build.
    warnings_path = self._get_state_filename('warnings.json')
    monitor = self._spawn(BuildMonitor)
    monitor.init(warnings_path)

    with BuildOutputManager(self.log_manager, monitor) as output:
        monitor.start()

        if what:
            # Partial build of explicitly-named targets.
            top_make = os.path.join(self.topobjdir, 'Makefile')
            if not os.path.exists(top_make):
                print('Your tree has not been configured yet. Please run '
                    '|mach build| with no arguments.')
                return 1

            # Collect target pairs.
            target_pairs = []
            for target in what:
                path_arg = self._wrap_path_argument(target)

                make_dir, make_target = resolve_target_to_make(
                    self.topobjdir, path_arg.relpath())

                if make_dir is None and make_target is None:
                    return 1

                # See bug 886162 - we don't want to "accidentally" build
                # the entire tree (if that's really the intent, it's
                # unlikely they would have specified a directory.)
                if not make_dir and not make_target:
                    print("The specified directory doesn't contain a "
                        "Makefile and the first parent with one is the "
                        "root of the tree. Please specify a directory "
                        "with a Makefile or run |mach build| if you "
                        "want to build the entire tree.")
                    return 1

                target_pairs.append((make_dir, make_target))

            # Possibly add extra make depencies using dumbmake.
            if not disable_extra_make_dependencies:
                from dumbmake.dumbmake import (dependency_map,
                                               add_extra_dependencies)
                depfile = os.path.join(self.topsrcdir, 'build',
                                       'dumbmake-dependencies')
                with open(depfile) as f:
                    dm = dependency_map(f.readlines())
                new_pairs = list(add_extra_dependencies(target_pairs, dm))
                self.log(logging.DEBUG, 'dumbmake',
                         {'target_pairs': target_pairs,
                          'new_pairs': new_pairs},
                         'Added extra dependencies: will build {new_pairs} ' +
                         'instead of {target_pairs}.')
                target_pairs = new_pairs

            # Build target pairs.
            for make_dir, make_target in target_pairs:
                # We don't display build status messages during partial
                # tree builds because they aren't reliable there. This
                # could potentially be fixed if the build monitor were more
                # intelligent about encountering undefined state.
                status = self._run_make(directory=make_dir, target=make_target,
                    line_handler=output.on_line, log=False,
                    print_directory=False, ensure_exit_code=False,
                    num_jobs=jobs, silent=not verbose,
                    append_env={b'NO_BUILDSTATUS_MESSAGES': b'1'},
                    force_pymake=pymake)

                if status != 0:
                    break
        else:
            # Full-tree build; also record resource usage.
            monitor.start_resource_recording()
            status = self._run_make(srcdir=True, filename='client.mk',
                line_handler=output.on_line, log=False, print_directory=False,
                allow_parallel=False, ensure_exit_code=False, num_jobs=jobs,
                silent=not verbose, force_pymake=pymake)

            self.log(logging.WARNING, 'warning_summary',
                {'count': len(monitor.warnings_database)},
                '{count} compiler warnings present.')

        # Only record resource usage for successful builds.
        monitor.finish(record_usage=status==0)

    high_finder, finder_percent = monitor.have_high_finder_usage()
    if high_finder:
        print(FINDER_SLOW_MESSAGE % finder_percent)

    if monitor.elapsed > 300:
        # Display a notification when the build completes.
        # This could probably be uplifted into the mach core or at least
        # into a helper API. It is here as an experimentation to see how it
        # is received.
        try:
            if sys.platform.startswith('darwin'):
                notifier = which.which('terminal-notifier')
                self.run_process([notifier, '-title',
                    'Mozilla Build System', '-group', 'mozbuild',
                    '-message', 'Build complete'], ensure_exit_code=False)
        except which.WhichError:
            # terminal-notifier not installed; silently skip notification.
            pass
        except Exception as e:
            self.log(logging.WARNING, 'notifier-failed', {'error':
                e.message}, 'Notification center failed: {error}')

    if status:
        return status

    long_build = monitor.elapsed > 600

    if long_build:
        print('We know it took a while, but your build finally finished successfully!')
    else:
        print('Your build was successful!')

    if monitor.have_resource_usage:
        print('To view resource usage of the build, run |mach '
            'resource-usage|.')

    # Only for full builds because incremental builders likely don't
    # need to be burdened with this.
    if not what:
        # Fennec doesn't have useful output from just building. We should
        # arguably make the build action useful for Fennec. Another day...
        if self.substs['MOZ_BUILD_APP'] != 'mobile/android':
            app_path = self.get_binary_path('app')
            print('To take your build for a test drive, run: %s' % app_path)
        app = self.substs['MOZ_BUILD_APP']
        if app in ('browser', 'mobile/android'):
            print('For more information on what to do now, see '
                'https://developer.mozilla.org/docs/Developer_Guide/So_You_Just_Built_Firefox')

    return status
def notify(self, msg):
    """Show a desktop notification with the supplied message

    On Linux and Mac, this will show a desktop notification with the
    message, but on Windows we can only flash the screen.
    """
    # Opt-out knob: setting MOZ_NOSPAM suppresses all notifications.
    moz_nospam = os.environ.get('MOZ_NOSPAM')
    if moz_nospam:
        return

    try:
        if sys.platform.startswith('darwin'):
            try:
                notifier = which.which('terminal-notifier')
            except which.WhichError:
                raise Exception('Install terminal-notifier to get '
                                'a notification when the build finishes.')
            self.run_process([
                notifier, '-title', 'Mozilla Build System',
                '-group', 'mozbuild', '-message', msg
            ], ensure_exit_code=False)
        elif sys.platform.startswith('linux'):
            try:
                notifier = which.which('notify-send')
            except which.WhichError:
                raise Exception(
                    'Install notify-send (usually part of '
                    'the libnotify package) to get a notification when '
                    'the build finishes.')
            self.run_process([
                notifier, '--app-name=Mozilla Build System',
                'Mozilla Build System', msg
            ], ensure_exit_code=False)
        elif sys.platform.startswith('win'):
            # No toast API used here; instead flash the console window's
            # caption and taskbar entry via user32!FlashWindowEx.
            from ctypes import Structure, windll, POINTER, sizeof
            from ctypes.wintypes import DWORD, HANDLE, WINFUNCTYPE, BOOL, UINT

            class FLASHWINDOW(Structure):
                # Mirrors the Win32 FLASHWINFO structure.
                _fields_ = [("cbSize", UINT),
                            ("hwnd", HANDLE),
                            ("dwFlags", DWORD),
                            ("uCount", UINT),
                            ("dwTimeout", DWORD)]

            FlashWindowExProto = WINFUNCTYPE(BOOL, POINTER(FLASHWINDOW))
            FlashWindowEx = FlashWindowExProto(
                ("FlashWindowEx", windll.user32))
            FLASHW_CAPTION = 0x01
            FLASHW_TRAY = 0x02
            FLASHW_TIMERNOFG = 0x0C

            # GetConsoleWindows returns NULL if no console is attached. We
            # can't flash nothing.
            console = windll.kernel32.GetConsoleWindow()
            if not console:
                return

            params = FLASHWINDOW(
                sizeof(FLASHWINDOW), console,
                FLASHW_CAPTION | FLASHW_TRAY | FLASHW_TIMERNOFG, 3, 0)
            FlashWindowEx(params)
    except Exception as e:
        self.log(logging.WARNING, 'notifier-failed',
                 {'error': e.message},
                 'Notification center failed: {error}')
def run_b2g_test(self, test_file=None, b2g_home=None, xre_path=None, total_chunks=None, this_chunk=None, no_window=None, **kwargs):
    """Runs a b2g mochitest.

    test_file is a path to a test file. It can be a relative path from the
    top source directory, an absolute filename, or a directory containing
    test files.

    b2g_home/xre_path configure the remote (device) harness;
    total_chunks/this_chunk select a chunked slice of the suite;
    no_window suppresses the emulator window.
    Returns the harness exit code.
    """
    # Need to call relpath before os.chdir() below.
    test_path = ''
    if test_file:
        test_path = self._wrap_path_argument(test_file).relpath()

    # TODO without os.chdir, chained imports fail below
    os.chdir(self.mochitest_dir)

    # The imp module can spew warnings if the modules below have
    # already been imported, ignore them.
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')

        import imp
        path = os.path.join(self.mochitest_dir, 'runtestsb2g.py')
        with open(path, 'r') as fh:
            imp.load_module('mochitest', fh, path,
                            ('.py', 'r', imp.PY_SOURCE))

        import mochitest
        from mochitest_options import B2GOptions

    parser = B2GOptions()
    options = parser.parse_args([])[0]

    if test_path:
        # Test paths are resolved relative to the served 'tests' root.
        test_root_file = mozpack.path.join(self.mochitest_dir, 'tests',
                                           test_path)
        if not os.path.exists(test_root_file):
            print('Specified test path does not exist: %s' % test_root_file)
            return 1
        options.testPath = test_path
    elif conditions.is_b2g_desktop(self):
        options.testManifest = 'b2g-desktop.json'
    else:
        options.testManifest = 'b2g.json'

    # Forward remaining keyword arguments onto the options object
    # (Python 2 dict API).
    for k, v in kwargs.iteritems():
        setattr(options, k, v)

    options.noWindow = no_window
    options.totalChunks = total_chunks
    options.thisChunk = this_chunk

    options.symbolsPath = os.path.join(self.distdir,
                                       'crashreporter-symbols')

    options.consoleLevel = 'INFO'
    if conditions.is_b2g_desktop(self):
        if self.substs.get('ENABLE_MARIONETTE') != '1':
            print(MARIONETTE_DISABLED % ('mochitest-b2g-desktop',
                                         self.mozconfig['path']))
            return 1

        options.profile = options.profile or os.environ.get('GAIA_PROFILE')
        if not options.profile:
            print(GAIA_PROFILE_NOT_FOUND % 'mochitest-b2g-desktop')
            return 1

        # A debug extension in the profile indicates a debug Gaia
        # profile, which is refused here.
        if os.path.isfile(os.path.join(options.profile, 'extensions', \
                '*****@*****.**')):
            print(GAIA_PROFILE_IS_DEBUG % ('mochitest-b2g-desktop',
                                           options.profile))
            return 1

        options.desktop = True
        options.app = self.get_binary_path()
        # Prefer the '-bin' binary when it exists; otherwise fall back.
        if not options.app.endswith('-bin'):
            options.app = '%s-bin' % options.app
        if not os.path.isfile(options.app):
            options.app = options.app[:-len('-bin')]

        return mochitest.run_desktop_mochitests(parser, options)

    # Remote (device) run: adb must be on the PATH.
    try:
        which.which('adb')
    except which.WhichError:
        # TODO Find adb automatically if it isn't on the path
        print(ADB_NOT_FOUND % ('mochitest-remote', b2g_home))
        return 1

    options.b2gPath = b2g_home
    options.logcat_dir = self.mochitest_dir
    options.httpdPath = self.mochitest_dir
    options.xrePath = xre_path
    return mochitest.run_remote_mochitests(parser, options)
class StdLibsZone(object):
    """Singleton zone managing the db/stdlibs/... area.

    Because this is a singleton we shouldn't have to worry about locking
    to prevent corruption.
    """
    _res_index = None   # cix-path -> last-updated

    def __init__(self, db):
        # db -- the owning Database instance; its base_dir anchors this
        # zone's on-disk area.
        self.db = db
        self.stdlibs_dir = join(dirname(dirname(__file__)), "stdlibs")
        self.base_dir = join(self.db.base_dir, "db", "stdlibs")
        self._stdlib_from_stdlib_ver_and_name = {
            } # cache of StdLib singletons
        self._vers_and_names_from_lang = {
            } # lang -> ordered list of (ver, name)

    def vers_and_names_from_lang(self, lang):
        "Returns an ordered list of (ver, name) for the given lang."
        # _vers_and_names_from_lang = {
        #   "php": [
        #       ((4,3), "php-4.3"),
        #       ((5.0), "php-5.0"),
        #       ((5.1), "php-5.1"),
        #       ((5.2), "php-5.2"),
        #       ((5,3), "php-5.3")
        #   ],
        #   "ruby": [
        #       (None, "ruby"),
        #   ],
        #   ...
        # }
        vers_and_names = self._vers_and_names_from_lang.get(lang)
        if vers_and_names is None:
            # Find the available stdlibs for this language.
            cix_glob = join(self.stdlibs_dir,
                            safe_lang_from_lang(lang) + "*.cix")
            cix_paths = glob(cix_glob)
            vers_and_names = []
            for cix_path in cix_paths:
                # "php-5.1.cix" -> base "php", ver (5,1); bare names get
                # ver None.
                name = splitext(basename(cix_path))[0]
                if '-' in name:
                    base, ver_str = name.split('-', 1)
                    ver = _ver_from_ver_str(ver_str)
                else:
                    base = name
                    ver = None
                if base.lower() != lang.lower():
                    # Only process when the base name matches the language.
                    # I.e. skip if base is "python3" and lang is "python".
                    continue
                vers_and_names.append((ver, name))
            vers_and_names.sort()
            self._vers_and_names_from_lang[lang] = vers_and_names
        return vers_and_names

    @property
    def res_index(self):
        "cix-path -> last-updated"
        # Lazily loaded from a pickle; empty dict when absent.
        if self._res_index is None:
            idxpath = join(self.base_dir, "res_index")
            self._res_index = self.db.load_pickle(idxpath, {})
        return self._res_index

    def save(self):
        # Persist the index only if it was ever loaded/modified.
        if self._res_index is not None:
            self.db.save_pickle(join(self.base_dir, "res_index"),
                                self._res_index)

    def cull_mem(self):
        """Cull memory usage as appropriate.
        This is a no-op for StdLibsZone because its memory use is bounded
        and doesn't really need culling.
        """
        pass

    def reportMemory(self, reporter, closure=None):
        """
        Report on memory usage from this StdLibZone. See
        nsIMemoryMultiReporter
        """
        log.debug("StdLibZone: reporting memory")
        total_mem_usage = 0
        for stdlib in self._stdlib_from_stdlib_ver_and_name.values():
            total_mem_usage += stdlib.reportMemory(reporter, closure)
        return total_mem_usage

    def get_lib(self, lang, ver_str=None):
        """Return a view into the stdlibs zone for a particular language
        and version's stdlib.

            "lang" is the language, e.g. "Perl", for which to get a
                stdlib.
            "ver_str" (optional) is a specific version of the language,
                e.g. "5.8".

        On first get of a stdlib for a particular language, all available
        stdlibs for that lang are updated, if necessary.

        Returns None if there is not stdlib for this language.
        """
        vers_and_names = self.vers_and_names_from_lang(lang)
        if not vers_and_names:
            return None
        if ver_str is None:
            # Default to the latest version.
            ver = vers_and_names[-1][0]
        else:
            ver = _ver_from_ver_str(ver_str)

        # Here is something like what we have for PHP:
        #   vers_and_names = [
        #        (None, "php"),
        #        ((4,0), "php-4.0"),
        #        ((4,1), "php-4.1"),
        #        ((4,2), "php-4.2"),
        #        ((4,3), "php-4.3"),
        #        ((5,0), "php-5.0"),
        #        ((5,1), "php-5.1"),
        #   ]
        # We want to (quickly) pick the best fit stdlib for the given
        # PHP version:
        #   PHP (ver=None): php
        #   PHP 3.0:        php
        #   PHP 4.0:        php-4.0  (exact match)
        #   PHP 4.0.2:      php-4.0  (higher sub-version)
        #   PHP 4.4:        php-4.3
        #   PHP 6.0:        php-5.1
        key = (ver, "zzz")  # 'zzz' > any stdlib name (e.g., 'zzz' > 'php-4.2')
        idx = max(0, bisect.bisect_right(vers_and_names, key) - 1)
        log.debug("best stdlib fit for %s ver=%s in %s is %s",
                  lang, ver, vers_and_names, vers_and_names[idx])
        stdlib_match = vers_and_names[idx]
        stdlib_ver, stdlib_name = stdlib_match

        if stdlib_match not in self._stdlib_from_stdlib_ver_and_name:
            # TODO: This _update_lang_with_ver method should really moved
            # into the StdLib class.
            self._update_lang_with_ver(lang, ver=stdlib_ver)
            stdlib = StdLib(self.db, join(self.base_dir, stdlib_name),
                            lang, stdlib_name)
            self._stdlib_from_stdlib_ver_and_name[stdlib_match] = stdlib
        return self._stdlib_from_stdlib_ver_and_name[stdlib_match]

    def _get_preload_zip(self):
        # Archive from which the stdlibs db area can be seeded.
        return join(self.stdlibs_dir, "stdlibs.zip")

    def can_preload(self):
        """Return True iff can preload."""
        if exists(self.base_dir):
            log.info("can't preload stdlibs: `%s' exists", self.base_dir)
            return False
        # Python 2 exception syntax below ('except X, ex').
        try:
            import process
            import which
        except ImportError, ex:
            log.info("can't preload stdlibs: %s", ex)
            return False
        try:
            which.which("unzip")
        except which.WhichError, ex:
            log.info("can't preload stdlibs: %s", ex)
            return False
        # NOTE(review): the method ends here with no 'return True' on the
        # success path (implicitly returns None) -- looks truncated;
        # confirm against upstream before relying on a True result.
def setCorePath(self, compiler=None, extra=None):
    """Resolve and store the interpreter-derived core path.

    When *compiler* is omitted, fall back to the ``python`` executable
    found on the PATH.  The *extra* argument is accepted for interface
    compatibility and is not used here.
    """
    chosen = compiler
    if chosen is None:
        import which
        chosen = which.which("python")
    self.corePath = self._shellOutForPath(chosen)
def OnEditor(self, event):
    """ Method that edit the python code of associated devs model of the Block

    Offers to open the model's Python source either in the user's local
    editor (per-platform dispatch) or in the built-in DEVSimPy editor.
    Returns the editor frame on success, False on failure, or None when
    a local editor was used / the dialog was cancelled.
    """
    from Container import ShapeCanvas

    python_path = self.python_path
    model_path = os.path.dirname(python_path)
    name = os.path.basename(python_path)

    ### trying to get parent window
    mainW = GetActiveWindow(event)
    if isinstance(mainW, ShapeCanvas):
        mainW = mainW.GetParent()

    # Ask about the local editor only for plain on-disk files (not zipped
    # models or http-hosted sources) and when the preference allows it.
    if not builtins.__dict__['LOCAL_EDITOR'] and not zipfile.is_zipfile(model_path) and not python_path.startswith('http'):
        dial = wx.MessageDialog(mainW, _('Do you want to use your local programmer software?\n\n If you always want use the DEVSimPy code editor\n change the option in Editor panel preferences.'), name, wx.YES_NO | wx.NO_DEFAULT | wx.ICON_QUESTION)
        val = dial.ShowModal()
    else:
        val = wx.ID_NO

    ### if local editor
    if val == wx.ID_YES:
        ### open with local editor
        if wx.Platform == '__WXMAC__':
            subprocess.call(" ".join(['open', python_path]), shell=True)
        elif "wxMSW" in wx.PlatformInfo:
            os.startfile(python_path)
        elif "wxGTK" in wx.PlatformInfo:
            ### with gnome
            # NOTE(review): 'pidof <x>' returning 256 means the process was
            # NOT found -- presumably the intent is to pick an editor whose
            # desktop session is absent/present; confirm the logic.
            if os.system('pidof gedit') == 256:
                try:
                    soft = which('gedit')
                except:
                    sys.stdout.write(_("Local programmer software not found!\n"))
                else:
                    subprocess.call(" ".join([soft, python_path]), shell=True)
            ### with kde
            elif os.system('pidof ksmserver') == 256:
                try:
                    soft = which('kfmclient')
                except:
                    sys.stdout.write(_("Local programmer software not found!\n"))
                else:
                    os.system(soft + " openURL " + python_path)
            else:
                sys.stdout.write(_("Unknown Windows Manager!\n"))
    elif val != wx.ID_CANCEL:
        # loading file in DEVSimPy editor windows (self.text)
        try:
            editorFrame = Editor.GetEditor(None, wx.NewIdRef(), ''.join([name, ' - ', model_path]), obj=self, file_type='block')

            # if zipfile.is_zipfile(model_path):
            #     importer = zipimport.zipimporter(model_path)
            #     text = importer.get_source(os.path.splitext(name)[0])
            if not zipfile.is_zipfile(model_path):
                ### if file is localized on the net
                if python_path.startswith('http'):
                    ### with internet python file, the editorFrame is read only
                    editorFrame.SetReadOnly(True)
                    printOnStatusBar(editorFrame.statusbar, {0: _('read only')})
                    ### parse url to extract the path(/devsimpy/domain...) and the network location (lcapocchi.free.fr)
                    o = urlparse(python_path)
                    ### open connection
                    c = httplib.HTTPConnection(o.netloc)
                    ### request with GET mode
                    c.request('GET', o.path)
                    ### get response of request
                    r = c.getresponse()
                    ### convert file into string
                    text = r.read()
                else:
                    ### if python_path is not found (because have an external origin)
                    if not os.path.exists(python_path):
                        if os.path.basename(DOMAIN_PATH) in python_path.split(os.sep):
                            python_path = os.path.join(HOME_PATH, python_path[python_path.index(os.path.basename(DOMAIN_PATH)):].strip('[]'))
                            self.python_path = python_path

            # ### only with python 2.6
            # with codecs.open(python_path, 'r', 'utf-8') as f:
            #     text = f.read()

            name = os.path.basename(python_path)
            editorFrame.AddEditPage(name, python_path)
            editorFrame.Show()
            printOnStatusBar(editorFrame.statusbar, {1: ''})
            return editorFrame
        except Exception as info:
            dlg = wx.MessageDialog(mainW, _('Editor frame not instanciated: %s\n' % info), name, wx.OK | wx.ICON_ERROR)
            dlg.ShowModal()
            return False
def lint_with_text(self, request, text):
    """Lint SCSS/Sass *text* by driving the external compiler via ruby.

    Falls back to the builtin CSS linter when the configured linter type
    is 'builtin' or when the compiler/ruby cannot be located (the prefs
    are then rewritten to 'builtin').  Returns a koLintResults of
    warnings/errors parsed from the compiler's stderr, or None when
    linting is disabled.
    """
    try:
        prefset = request.prefset
        linterPrefName = "%sLinterType" % self.cmd
        scssLinterType = prefset.getStringPref(linterPrefName)
        if scssLinterType == "none":
            return
        if scssLinterType == "builtin":
            # NOTE(review): this call instantiates KoCSSLinter() while the
            # fallbacks below use the unbound KoCSSLinter.lint_with_text(
            # self, ...) form -- presumably equivalent here; confirm.
            return KoCSSLinter().lint_with_text(request, text)
        interpreterPrefName = "%sDefaultInterpreter" % self.cmd
        scssPath = prefset.getStringPref(interpreterPrefName)
        # The 'or' part handles any language for "Find on Path"
        if (not scssPath) or not os.path.exists(scssPath):
            try:
                scssPath = which.which(self.cmd)
            except which.WhichError:
                pass
        if not scssPath or not os.path.exists(scssPath):
            log.warn("Setting %sLinterType to 'default': %s not found",
                     self.cmd, self.cmd)
            prefset.setStringPref(linterPrefName, "builtin")
            return KoCSSLinter().lint_with_text(request, text)
        else:
            # Remember the located path for next time.
            prefset.setStringPref(interpreterPrefName, scssPath)
        rubyPath = prefset.getStringPref("rubyDefaultInterpreter")
        if (not rubyPath) or not os.path.exists(rubyPath):
            try:
                rubyPath = which.which("ruby")
            except which.WhichError:
                pass
        if (not rubyPath) or not os.path.exists(rubyPath):
            log.warn(
                "Setting %s to 'default': no ruby found to drive %s",
                linterPrefName, self.cmd)
            prefset.setStringPref(linterPrefName, "builtin")
            return KoCSSLinter.lint_with_text(self, request, text)
        else:
            prefset.setStringPref("rubyDefaultInterpreter", rubyPath)

        # Run scss
        # Write the buffer to a temp file so the external tool can read it.
        tmpfilename = tempfile.mktemp() + '.' + self.cmd
        fout = open(tmpfilename, 'wb')
        fout.write(text)
        fout.close()
        textlines = text.splitlines()
        cmd = [rubyPath, scssPath, "-c", tmpfilename]
        #koLintResult.insertNiceness(cmd)
        cwd = request.cwd or None
        # We only need the stderr result.
        try:
            p = process.ProcessOpen(cmd, cwd=cwd,
                                    env=koprocessutils.getUserEnv(),
                                    stdin=None)
            stderr = p.communicate()[1]
            warnLines = stderr.splitlines(0) # Don't need the newlines.
        except:
            warnLines = []
        finally:
            os.unlink(tmpfilename)
    except:
        log.exception("scss: lint_with_text: big fail")
        warnLines = []
    results = koLintResults()
    # The compiler emits a message line followed by a line-number line;
    # track the previous line to recover the message text.
    prevLine = ""
    for line in warnLines:
        m = self._scss_emsg_ptn.match(line)
        if m:
            lineNo = int(m.group(1))
            m2 = self._syntaxErrorPtn.match(prevLine)
            if m2:
                severity = koLintResult.SEV_ERROR
                msg = m2.group(1)
            else:
                severity = koLintResult.SEV_WARNING
                msg = prevLine
            desc = self.cmd + ": " + msg
            koLintResult.createAddResult(results, textlines, severity,
                                         lineNo, desc)
        else:
            prevLine = line
    return results
def mkenvconf():
    """Generate tmp/envconf.{bat|sh} with PATH/PYTHONPATH (and, on
    Windows, MSVC) environment setup for the codeintel dev environment.

    Must be run from the top-level codeintel dir (verified via a
    landmark file).  Python 2 code: uses the old-style ``file()``
    builtin and a Python 2 ``print`` statement inside the captured
    snippet string.
    """
    log.debug("mkenvconf: start")

    # This should only be run via the setenv scripts and only from the
    # top-level codeintel dir. Verify the latter, at least.
    landmark = join("lib", "codeintel2", "citadel.py")
    if not exists(landmark):
        raise Error("support/mkenvconf.py should only be run via the "
                    "bin/setenv.{bat|sh} scripts and only from the "
                    "top-level codeintel dir: `%s' landmark does not exist"
                    % landmark)

    envconf = []
    CODEINTEL_SRC = abspath(os.getcwd())
    python = get_config().python

    # Put 'bin' on PATH. Also, if not already first, put the configured
    # python first on PATH.
    paths = []
    if not which.which("python").lower() == python.lower():
        paths.append(dirname(python))
    paths.append(join(CODEINTEL_SRC, "bin"))
    if sys.platform == "win32":
        envconf.append("set PATH=%s;%%PATH%%" % os.pathsep.join(paths))
    else:
        envconf.append("export PATH=%s:$PATH" % os.pathsep.join(paths))

    # Put necessary dirs on PYTHONPATH.
    komodo_src_dir = normpath(join(CODEINTEL_SRC, "..", ".."))
    pythonpaths = [
        join(CODEINTEL_SRC, "lib"),
        join(CODEINTEL_SRC, "support"),
        # 'python-sitelib' needed for XML completion support stuff
        # and textinfo/langinfo system
        join(komodo_src_dir, "src", "python-sitelib"),
        # 'find' needed for findlib2.py
        join(komodo_src_dir, "src", "find"),
        # 'util' needed for testlib.py
        join(komodo_src_dir, "util"),
        join(komodo_src_dir, "contrib" ,"zope" ,"cachedescriptors" ,"src"),
    ]
    # Make the zope directory a package.
    if not exists(join(komodo_src_dir, "contrib" ,"zope" ,"__init__.py")):
        file(join(komodo_src_dir, "contrib" ,"zope" ,"__init__.py"), "w").write("")
    udl_skel_dir = join(komodo_src_dir, "src", "udl", "skel")
    for d in os.listdir(udl_skel_dir):
        pylib_dir = join(udl_skel_dir, d, "pylib")
        if exists(pylib_dir):
            pythonpaths.append(pylib_dir)
    if sys.platform == "win32":
        # Also need the winprocess module on Windows.
        pythonpaths.append(join(komodo_src_dir, "contrib", "smallstuff"))
        envconf.append("set PYTHONPATH=%s;%%PYTHONPATH%%"
                       % os.pathsep.join(pythonpaths))
    else:
        envconf.append("""
if [ -z "$PYTHONPATH" ]; then
    export PYTHONPATH=%s
else
    export PYTHONPATH=%s:$PYTHONPATH
fi
""" % (os.pathsep.join(pythonpaths), os.pathsep.join(pythonpaths)))

    # On Windows, determine which compiler is appropriate for the configured
    # Python and call the appropriate setup script for that compiler.
    if sys.platform == "win32":
        msvc_ver = _capture_python_output(python,
            "import distutils.msvccompiler as c; "
            "print c.get_build_version()").strip()
        if msvc_ver == "6":
            msvc_setenv = r"C:\Program Files\Microsoft Visual Studio\VC98\bin\VCVARS32.BAT"
        elif msvc_ver.startswith("7"):
            msvc_setenv = r"C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat"
        elif msvc_ver.startswith("8"):
            msvc_setenv = r"C:\Program Files\Microsoft Visual Studio 8\VC\bin\vcvars32.bat"
        elif msvc_ver.startswith("9"):
            msvc_setenv = r"C:\Program Files\Microsoft Visual Studio 9.0\VC\bin\vcvars32.bat"
        elif msvc_ver.startswith("11"):
            msvc_setenv = r"C:\Program Files\Microsoft Visual Studio 11.0\VC\bin\vcvars32.bat"
        else:
            raise Error("unrecognized MSVC version used to build your "
                        "configured python: '%s' (python='%s')"
                        % (msvc_ver, python))
        if not exists(msvc_setenv):
            # Try the x86 version, on Windows 7 it might be under:
            #   C:\Program Files (x86)\
            alt_msvc_setenv = msvc_setenv.replace("Program Files",
                                                  "Program Files (x86)")
            if exists(alt_msvc_setenv):
                msvc_setenv = alt_msvc_setenv
            else:
                raise Error("environ script for MSVC %s does not exist: '%s'"
                            % (msvc_ver, msvc_setenv))
        envconf.append('call "%s"' % msvc_setenv)

    # Setup some convenience aliases non-Windows.
    if sys.platform != "win32":
        envconf.append("alias ci2='python %s/ci2.py'" % CODEINTEL_SRC)
        envconf.append("alias mk='python Makefile.py'")

    # Write out the envconf to 'tmp/envconf.{bat|sh}'.
    tmp_dir = "tmp"
    if not exists(tmp_dir):
        log.debug("mkdir `%s'", tmp_dir)
        os.makedirs(tmp_dir)
    if sys.platform == "win32":
        envconf_path = join(tmp_dir, "envconf.bat")
    else:
        envconf_path = join(tmp_dir, "envconf.sh")
    if exists(envconf_path):
        log.debug("rm `%s'", envconf_path)
        os.remove(envconf_path)
    log.info("create `%s'", envconf_path)
    fout = open(envconf_path, 'w')
    try:
        fout.write('\n\n'.join(envconf))
    finally:
        fout.close()
def which_or_None(str):
    """Locate *str* on the PATH.

    Returns the full path of the executable, or None when it cannot be
    found (i.e. WhichError is swallowed).
    """
    import which
    try:
        found = which.which(str)
    except which.WhichError:
        found = None
    return found
def __init__(self, lasagna, parent=None):
    """Construct the Elastix registration plugin.

    lasagna -- the host application object; parent is accepted for
    interface compatibility.  Aborts construction (deleteLater) when the
    elastix binary is not on the PATH.
    """
    super(plugin, self).__init__(
        lasagna
    )  #This calls the lasagna_plugin constructor which in turn calls subsequent constructors

    #Is the Elastix binary in the system path?
    if which('elastix') is None:
        #TODO: does not stop properly. Have to uncheck and recheck the plugin menu item to get it to run a second time.
        from alert import alert
        self.alert = alert(
            lasagna,
            'The elastix binary does not appear to be in your path.<br>Not starting plugin.'
        )
        self.lasagna.pluginActions[self.__module__].setChecked(
            False)  #Uncheck the menu item associated with this plugin's name
        self.deleteLater()
        return
    else:
        print("Using elastix binary at " + which('elastix'))

    #re-define some default properties that were originally defined in lasagna_plugin
    self.pluginShortName = 'Elastix'  #Appears on the menu
    self.pluginLongName = 'registration of images'  #Can be used for other purposes (e.g. tool-tip)
    self.pluginAuthor = 'Rob Campbell'

    #This is the file name we monitor during running
    self.elastixLogName = 'elastix.log'

    #Create widgets defined in the designer file
    self.setupUi(self)
    self.show()

    #The dictionary which will store the command components
    #the param files and output are separate as they are stored in the list view and ouput path text edit box
    self.elastix_cmd = {
        'f': '',  #fixed image
        'm': ''   #moving image
    }

    #A dictionary for storing location of temporary parameter files
    #Temporary parameter files are created as the user edits them and are
    #removed when the registration starts
    self.tmpParamFiles = {}

    #Create some properties which we will need
    self.fixedStackPath = ''   #absolute path to reference image
    self.movingStackPath = ''  #absolute path to sample image

    #Set up the list view on Tab 2
    self.paramItemModel = QtGui.QStandardItemModel(self.paramListView)
    self.paramListView.setModel(self.paramItemModel)

    #Link signals to slots
    #Tab 1 - Loading data
    self.loadFixed.released.connect(self.loadFixed_slot)
    self.loadMoving.released.connect(self.loadMoving_slot)
    self.originalMovingImage = None  #The original moving image is stored here
    self.originalMovingFname = None

    #Flip axis
    self.flipAxis1.released.connect(lambda: self.flipAxis_Slot(0))
    self.flipAxis2.released.connect(lambda: self.flipAxis_Slot(1))
    self.flipAxis3.released.connect(lambda: self.flipAxis_Slot(2))

    #Rotate axis
    self.rotAxis1.released.connect(lambda: self.rotAxis_Slot(0))
    self.rotAxis2.released.connect(lambda: self.rotAxis_Slot(1))
    self.rotAxis3.released.connect(lambda: self.rotAxis_Slot(2))

    #Swap axes
    self.swapAxis1_2.released.connect(lambda: self.swapAxis_Slot(0, 1))
    self.swapAxis2_3.released.connect(lambda: self.swapAxis_Slot(1, 2))
    self.swapAxis3_1.released.connect(lambda: self.swapAxis_Slot(2, 0))

    self.saveModifiedMovingStack.released.connect(
        self.saveModifiedMovingStack_slot)

    #Tab 2 - Building the registration command
    self.outputDirSelect_button.released.connect(self.selectOutputDir_slot)
    self.removeParameter.released.connect(self.removeParameter_slot)
    self.loadParamFile.released.connect(self.loadParamFile_slot)
    self.moveParamUp_button.released.connect(self.moveParamUp_button_slot)
    self.moveParamDown_button.released.connect(
        self.moveParamDown_button_slot)

    #Tab 3 - parameter file
    self.plainTextEditParam.textChanged.connect(
        self.plainTextEditParam_slot)
    self.comboBoxParam.activated.connect(
        self.comboBoxParamLoadOnSelect_slot)

    #Tab 4: running
    self.runElastix_button.released.connect(self.runElastix_button_slot)
    self.runElastix_button.setEnabled(False)

    #Start a QTimer to poll for finished analyses
    self.finishedMonitorTimer = QtCore.QTimer()
    self.finishedMonitorTimer.timeout.connect(self.analysisFinished_slot)
    self.finishedMonitorTimer.start(
        2500)  #Number of milliseconds between poll events
    self.listofDirectoriesWithRunningAnalyses = []
    #Set up list view on the running tab
    self.runningAnalysesItemModel = QtGui.QStandardItemModel(
        self.runningRegistrations_ListView)
    self.runningRegistrations_ListView.setModel(
        self.runningAnalysesItemModel)

    #Tab 5: results
    self.resultsItemModel = QtGui.QStandardItemModel(
        self.registrationResults_ListView)
    self.registrationResults_ListView.setModel(self.resultsItemModel)
    self.registrationResults_ListView.clicked.connect(
        self.resultImageClicked_Slot)
    self.resultImages_Dict = {
    }  #the keys are result image file names and the values are the result images

    self.showHighlightedResult_radioButton.toggled.connect(
        self.overlayRadioButtons_Slot)
    self.showOriginalMovingImage_radioButton.toggled.connect(
        self.overlayRadioButtons_Slot)

    #Clear all image stacks
    self.lasagna.removeIngredientByType('imagestack')

    #-------------------------------------------------------------------------------------
    #The following will either be hugely changed or deleted when the plugin is no longer
    #under heavy development
    debug = False  #runs certain things quickly to help development
    if debug and os.path.expanduser(
            "~"
    ) == '/home/rob':  #Ensure only I can trigger this. Ensures that it doesn't activate if I accidently push with debug enabled
        self.fixedStackPath = '/mnt/data/TissueCyte/registrationTests/regPipelinePrototype/YH84_150507_target.mhd'
        self.movingStackPath = '/mnt/data/TissueCyte/registrationTests/regPipelinePrototype/YH84_150507_moving.mhd'

        doRealLoad = True
        if doRealLoad:
            self.loadFixed_slot(self.fixedStackPath)
            self.loadMoving_slot(self.movingStackPath)
            self.lasagna.initialiseAxes()

        doParamFile = True
        if doParamFile:
            #load param file list
            paramFiles = [
                '/mnt/data/TissueCyte/registrationTests/regPipelinePrototype/Par0000affine.txt',
                '/mnt/data/TissueCyte/registrationTests/regPipelinePrototype/Par0000bspline.txt'
            ]
            paramFiles = [
                '/mnt/data/TissueCyte/registrationTests/regPipelinePrototype/Par0000affine_quick.txt'
            ]
            self.loadParamFile_slot(paramFiles)

        self.outputDir_label.setText(
            self.absToRelPath(
                '/mnt/data/TissueCyte/registrationTests/regPipelinePrototype/reg2'
            ))
        self.updateWidgets_slot()
        self.tabWidget.setCurrentIndex(3)
def lint_with_text(self, request, text):
    """Lint LESS *text* by driving the lessc compiler via node.

    Falls back to the builtin CSS linter when the configured linter type
    is 'builtin' or when lessc/node cannot be located (the prefs are
    then rewritten to 'builtin').  Returns a koLintResults of errors
    parsed from the compiler's stderr, or None when linting is disabled.
    """
    try:
        prefset = request.prefset
        lessLinterType = prefset.getStringPref("lessLinterType")
        if lessLinterType == "none":
            return
        if lessLinterType == "builtin":
            return KoCSSLinter.lint_with_text(self, request, text)
        lessPath = prefset.getStringPref("lessDefaultInterpreter")
        # The 'or' part handles any language for "Find on Path"
        if (not lessPath) or not os.path.exists(lessPath):
            try:
                lessPath = which.which("lessc")
            except which.WhichError:
                pass
        if (not lessPath) or not os.path.exists(lessPath):
            log.warn(
                "Setting lessLinterType to 'default': less not found")
            prefset.setStringPref("lessLinterType", "builtin")
            return KoCSSLinter.lint_with_text(self, request, text)
        else:
            # Remember the located path for next time.
            prefset.setStringPref("lessDefaultInterpreter", lessPath)
        nodePath = prefset.getStringPref("nodejsDefaultInterpreter")
        if (not nodePath) or not os.path.exists(nodePath):
            try:
                nodePath = which.which("node")
            except which.WhichError:
                pass
        if (not nodePath) or not os.path.exists(nodePath):
            log.warn(
                "Setting lessLinterType to 'default': no node found to drive less"
            )
            prefset.setStringPref("lessLinterType", "builtin")
            return KoCSSLinter.lint_with_text(self, request, text)
        else:
            prefset.setStringPref("nodejsDefaultInterpreter", nodePath)

        # Run less
        # Write the buffer to a temp file so the external tool can read it.
        tmpfilename = tempfile.mktemp() + '.less'
        fout = open(tmpfilename, 'wb')
        fout.write(text)
        fout.close()
        textlines = text.splitlines()
        cmd = [nodePath, lessPath, "--no-color", tmpfilename]
        #koLintResult.insertNiceness(cmd)
        cwd = request.cwd or None
        # We only need the stderr result.
        try:
            p = process.ProcessOpen(cmd, cwd=cwd,
                                    env=koprocessutils.getUserEnv(),
                                    stdin=None)
            stderr = p.communicate()[1]
            warnLines = stderr.splitlines(0) # Don't need the newlines.
        except:
            warnLines = []
        finally:
            os.unlink(tmpfilename)
    except:
        log.exception("less: lint_with_text: big fail")
        warnLines = []
    # They're all errors for this checker
    # (and they all say "Syntax Checker!")
    # (at least version 1.3.0 of the LESS Compiler does).
    severity = koLintResult.SEV_ERROR
    results = koLintResults()
    for line in warnLines:
        # Newer lessc output includes a column; fall back to the older
        # pattern without one.
        m = self._less_emsg_ptn.match(line)
        if m:
            lineNo = int(m.group(2))
            desc = m.group(1)
            column = int(m.group(3))
            koLintResult.createAddResult(results, textlines, severity,
                                         lineNo, desc, columnStart=column)
        else:
            m = self._less_emsg_ptn_old.match(line)
            if m:
                lineNo = int(m.group(2))
                desc = m.group(1)
                koLintResult.createAddResult(results, textlines, severity,
                                             lineNo, desc)
    return results