def update_environ():
    """Return a copy of ``os.environ`` with list-valued settings merged in.

    For each key under the 'environ' setting whose value is a list, the
    entries are expanded via ``expand_path``, normalised, and appended to the
    corresponding variable (treated as a ``pathsep``-separated list) without
    duplicating paths already present.  Returns None when cloning fails.
    """
    try:
        environ = os.environ.copy()
        env = settings().get('environ', None)
        if env and isinstance(env, dict):
            for key, value in env.items():
                # only list-valued settings are treated as path lists
                if value and isinstance(value, list):
                    pathstring = environ.get(key, None)
                    # drop entries whose expansion is empty/None
                    items = list(filter(None, map(expand_path, value)))
                    if items:
                        if pathstring:
                            paths = pathstring.split(pathsep)
                            for item in items:
                                item = normpath(item)
                                # linear duplicate scan; append only new paths
                                ismatch = False
                                for path in paths:
                                    if path == item:
                                        ismatch = True
                                if not ismatch:
                                    paths.append(item)
                            environ[key] = pathsep.join(paths)
                        else:
                            # variable not set yet: use the new items directly
                            environ[key] = pathsep.join(map(normpath, items))
        return environ
    except Exception as error:
        # best-effort: report and return None rather than crash the caller
        log.warning('Could not clone system environment: %s', error)
        return None
def test_activate_root_env_from_other_env(shell):
    # Activating "root" from an already-activated env must put the root
    # env's paths on PATH and drop the previous env's paths.
    shell_vars = _format_vars(shell)
    with TemporaryDirectory(prefix='envs', dir=dirname(__file__)) as envs:
        commands = (shell_vars['command_setup'] + """
        {source} "{syspath}{cmd_path}activate" "{env_dirs[0]}" {nul}
        {source} "{syspath}{cmd_path}activate" root
        {printpath}
        """).format(envs=envs, env_dirs=gen_test_env_paths(envs, shell), **shell_vars)
        stdout, stderr = run_in(commands, shell)
        assert_in(shells[shell]["path_to"](pathsep.join(_envpaths(root_dir))), stdout)
        assert_not_in(shells[shell]["path_to"](pathsep.join(_envpaths(envs, 'test1'))), stdout)
def test_single_path(self):
    # PYTHONPATH already contains one entry ("abc"); run() must prepend
    # the auto-instrumentation path and the current working directory.
    auto_instrumentation.run()
    self.assertEqual(
        environ["PYTHONPATH"],
        pathsep.join([self.auto_instrumentation_path, getcwd(), "abc"]),
    )
def test_empty(self):
    # With an empty PYTHONPATH, run() leaves exactly the
    # auto-instrumentation path followed by the current working directory.
    auto_instrumentation.run()
    self.assertEqual(
        environ["PYTHONPATH"],
        pathsep.join([self.auto_instrumentation_path, getcwd()]),
    )
def test_find_node_modules_basedir(self): driver = base.BaseDriver() # ensure that NODE_PATH is initially None driver.node_path = None driver.working_dir = mkdtemp(self) # initially should be empty, since no node_modules in either # directories that it should check self.assertEqual([], driver.find_node_modules_basedir()) # having the NODE_PATH defined will result in such p1 = mkdtemp(self) p2 = mkdtemp(self) driver.node_path = pathsep.join([p1, p2]) self.assertEqual([p1, p2], driver.find_node_modules_basedir()) # create the node_modules in the working directory defined for # the driver instance, and unset NODE_PATH driver.node_path = None dwd_wd_nm = join(driver.working_dir, 'node_modules') os.mkdir(dwd_wd_nm) self.assertEqual([dwd_wd_nm], driver.find_node_modules_basedir()) # combine the two, they should be in this order, where the # working directory has higher precedence over NODE_PATH driver.node_path = p1 self.assertEqual([dwd_wd_nm, p1], driver.find_node_modules_basedir())
def test_karma_runtime_multiple_artifacts_single_arg(self):
    # Multiple artifacts may be passed as a single pathsep-joined
    # --artifact argument; both must be reported as found.
    stub_stdouts(self)
    extra_artifact = join(mkdtemp(self), 'lib.js')
    with open(extra_artifact, 'w') as fd:
        fd.write(dedent("""
        'use strict';
        var Lib = function(args) {
        };
        Lib.prototype.add2 = function (i) {
            return i + i;
        };
        """))
    # use the full blown runtime
    rt = KarmaRuntime(self.driver)
    # the artifact in our case is identical to the source file
    artifact = resource_filename('calmjs.dev', 'main.js')
    rt([
        'run', '--artifact', pathsep.join([artifact, extra_artifact]),
        '--test-registry', 'calmjs.dev.module.tests',
        '--test-with-package', 'calmjs.dev', '-vv',
    ])
    logs = sys.stderr.getvalue()
    self.assertIn("specified artifact '%s' found" % artifact, logs)
    self.assertIn("specified artifact '%s' found" % extra_artifact, logs)
def add_to_win32_PATH(script_fpath, *add_path_list):
    r"""
    Writes a registry script to update the PATH variable into the sync registry

    CommandLine:
        python -m utool.util_win32 --test-add_to_win32_PATH --newpath "C:\Program Files (x86)\Graphviz2.38\bin"

    Example:
        >>> # SCRIPT
        >>> from utool.util_win32 import *  # NOQA
        >>> script_fpath = join(ut.truepath('~'), 'Sync/win7/registry', 'UPDATE_PATH.reg')
        >>> new_path = ut.get_argval('--newpath', str, default=None)
        >>> result = add_to_win32_PATH(script_fpath, new_path)
        >>> print(result)
    """
    import utool as ut
    write_dir = dirname(script_fpath)
    # FIX: raw string -- the original non-raw literal depended on Python
    # passing unknown escapes (\S, \C) through unchanged, which emits a
    # DeprecationWarning on Python 3 and will eventually become an error.
    key = r'[HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\Session Manager\Environment]'
    rtype = 'REG_EXPAND_SZ'
    # Read current PATH values
    win_pathlist = list(os.environ['PATH'].split(os.path.pathsep))
    # keep existing entries first, append only new ones, preserving order
    new_path_list = ut.unique_ordered(win_pathlist + list(add_path_list))
    print('\n'.join(new_path_list))
    pathtxt = pathsep.join(new_path_list)
    varval_list = [('Path', pathtxt)]
    regfile_str = make_regfile_str(key, varval_list, rtype)
    ut.view_directory(write_dir)
    print(regfile_str)
    # registry scripts are written as bytes
    ut.writeto(script_fpath, regfile_str, mode='wb')
    print('Please have an admin run the script. You may need to restart')
def call_pyang(args):
    """invoke pyang script with plugin path adjusted using auto-discovery."""
    # expose the auto-discovered plugin dirs to pyang via its env variable
    environ['PYANG_PLUGINPATH'] = pathsep.join(expanded())
    proc = Popen(['pyang'] + list(args), stdout=sys.stdout, stderr=sys.stderr)
    proc.wait()
    # propagate pyang's exit status to the caller
    return proc.returncode
def link(self, spec):
    """
    Basically link everything up as a bundle, as if statically linking
    everything into "binary" file.
    """
    node_path = pathsep.join(self.find_node_modules_basedir())
    if not node_path:
        logger.warning(
            'no valid node_modules found - webpack may fail to locate '
            'itself.')
    # TODO allow to (un)set option flags such as --display-reasons
    args = (spec[self.webpack_bin_key], '--display-modules',
            '--display-reasons', '--config', spec['webpack_config_js'])
    logger.info('invoking NODE_PATH=%r %s %s %s %s %s', node_path, *args)
    # note that webpack treats the configuration as an executable
    # node.js program - so that it will need to be able to import
    # (require) webpack - explicitly have to provide the one located
    # or associated with this toolchain instance, i.e. the one at
    # the current directory
    rc = call(args, env=webpack_env(node_path))
    if rc != 0:
        logger.error("webpack has encountered a fatal error")
        raise WebpackExitError(rc, spec[self.webpack_bin_key])
def update_python_path() -> None:
    """Promote this file's directory and the CWD to the front of PYTHONPATH.

    Resulting order: [this file's directory, current working directory,
    ...remaining original entries], with any pre-existing occurrences of
    the two promoted entries removed first.
    """
    raw = environ.get("PYTHONPATH")
    entries = raw.split(pathsep) if raw else []
    # Promote in reverse priority order: each insert lands at index 0,
    # so the entry inserted last ends up first.
    for front in (getcwd(), dirname(abspath(__file__))):
        entries = [entry for entry in entries if entry != front]
        entries.insert(0, front)
    environ["PYTHONPATH"] = pathsep.join(entries)
def olc(args):
    """offline compile a list of methods

    See Patterns below for a description of the format expected for "patterns..."

    The output traced by this command is not guaranteed to be the same as the
    output for a compilation performed at runtime. The code produced by a
    compiler is sensitive to the compilation context such as what classes
    have been resolved etc.

    Use "mx olc -help" to see what other options this command accepts.

    --- Patterns ---
    {0}"""
    i = 0
    insCP = []
    olcArgs = []
    # split args into classpath insertions and pass-through arguments
    while i < len(args):
        arg = args[i]
        if arg in ['-cp', '-classpath']:
            # consume the option's value as a classpath entry
            insCP += [mx.expand_project_in_class_path_arg(args[i + 1])]
            i += 1
        else:
            olcArgs += [arg]
        i += 1
    insCP = pathsep.join(insCP)
    mx.run_java([
        '-ea', '-esa', '-cp', mx.classpath() + pathsep + insCP,
        'com.oracle.max.vm.ext.maxri.Compile'] + olcArgs)
def add_to_path(*segments):
    """Context-manager generator: temporarily prepend *segments* to PATH.

    Yields once PATH has been updated; the previous value is restored on
    exit even if the managed block raises.  If PATH was unset beforehand,
    it is removed again rather than assigned None (which would raise
    TypeError inside os.environ).
    """
    old_path = environ.get('PATH')
    if old_path:
        segments = list(segments) + [old_path]
    environ['PATH'] = pathsep.join(segments)
    try:
        yield
    finally:
        # FIX: restore inside finally so an exception in the managed block
        # cannot leave PATH modified, and never write None into environ.
        if old_path is None:
            environ.pop('PATH', None)
        else:
            environ['PATH'] = old_path
def run() -> None:
    # Prepend this file's directory (and the CWD, if absent) to PYTHONPATH,
    # then replace the current process with the requested executable.
    python_path = environ.get("PYTHONPATH")
    if not python_path:
        python_path = []
    else:
        python_path = python_path.split(pathsep)
    cwd_path = getcwd()
    # This is being added to support applications that are being run from their
    # own executable, like Django.
    # FIXME investigate if there is another way to achieve this
    if cwd_path not in python_path:
        python_path.insert(0, cwd_path)
    filedir_path = dirname(abspath(__file__))
    # move this file's directory to the very front of PYTHONPATH
    python_path = [path for path in python_path if path != filedir_path]
    python_path.insert(0, filedir_path)
    environ["PYTHONPATH"] = pathsep.join(python_path)
    executable = which(argv[1])
    # execl never returns; argv[1] becomes argv[0] of the new process image
    execl(executable, executable, *argv[2:])
def __init__(self) -> None:
    # Resolve configuration files: site config dirs (a pathsep-separated
    # list) first, then the user config dir.
    dirs = AppDirs(appname='axuy', appauthor=False, multipath=True)
    parents = dirs.site_config_dir.split(pathsep)
    parents.append(dirs.user_config_dir)
    filenames = [pathjoin(parent, 'settings.ini') for parent in parents]

    # Parse configuration files
    self.config = ConfigParser()
    self.config.read(SETTINGS)
    self.config.read(filenames)
    self.fallback()

    # Parse command-line arguments
    self.options = ArgumentParser(usage='%(prog)s [options]',
                                  formatter_class=RawTextHelpFormatter)
    self.options.add_argument('-v', '--version', action='version',
                              version='Axuy {}'.format(__version__))
    self.options.add_argument(
        '--write-config', nargs='?', const=stdout, type=FileType('w'),
        metavar='PATH', dest='cfgout',
        help='write default config to PATH (fallback: stdout) and exit')
    self.options.add_argument(
        '-c', '--config', metavar='PATH',
        help='location of the configuration file (fallback: {})'.format(
            pathsep.join(filenames)))
    self.options.add_argument(
        '--host',
        help='host to bind this peer to (fallback: {})'.format(self.host))
    self.options.add_argument(
        '-p', '--port', type=int,
        help='port to bind this peer to (fallback: {})'.format(self.port))
    self.options.add_argument(
        '-s', '--seeder', metavar='ADDRESS',
        help='address of the peer that created the map')
def test_activate_root_env_from_other_env(shell):
    # Activating "root" from an already-activated env must put the root
    # env's paths on PATH and drop the previous env's paths.
    shell_vars = _format_vars(shell)
    with TemporaryDirectory(prefix='envs', dir=dirname(__file__)) as envs:
        commands = (shell_vars['command_setup'] + """
        {source} "{syspath}{cmd_path}activate" "{env_dirs[0]}" {nul}
        {source} "{syspath}{cmd_path}activate" root
        {printpath}
        """).format(envs=envs, env_dirs=gen_test_env_paths(envs, shell), **shell_vars)
        stdout, stderr = run_in(commands, shell)
        assert_in(shells[shell]["path_to"](pathsep.join(_envpaths(root_dir))), stdout)
        assert_not_in(
            shells[shell]["path_to"](pathsep.join(_envpaths(envs, 'test1'))),
            stdout)
def add_paths(key, targets):
    # Prepend any of *targets* (after env-var expansion) that are existing
    # directories and not already present in the list stored under *key*.
    paths = get_paths(key)
    added_paths = [
        target for target in map(expandvars, targets)
        if target not in paths and isdir(target)
    ]
    if not added_paths:
        # nothing new to add; avoid re-exporting an unchanged value
        return
    # new entries are placed ahead of the existing ones
    command_export(key, pathsep.join(added_paths + paths))
def verify_file_matches_repo_root(result, *file):
    """
    Assert that a generated file matches the one with the identical
    name in the project repository root.
    """
    mother_file = REPO_ROOT_PATH.join(*file).strpath
    generated_file = result.project.join(*file).strpath
    # NOTE(review): pathsep (':'/';') joins the display name in the message
    # below; a directory separator was probably intended -- message-only,
    # confirm before changing.
    assert compare_files(mother_file, generated_file), \
        "Mother project '{}' not matching template.\n {} != {}".format(
            pathsep.join(file), mother_file, generated_file)
def test_activate_root_env_from_other_env():
    # Switching to "root" from an active env should leave exactly the root
    # env paths plus the base PATH on PATH.
    for shell in shells:
        with TemporaryDirectory(prefix='envs', dir=dirname(__file__)) as envs:
            commands = (command_setup + """
            {source} {syspath}{binpath}activate "{env_dirs[0]}" {nul}
            {source} {syspath}{binpath}activate root
            {printpath}
            """).format(envs=envs, env_dirs=gen_test_env_paths(envs), **_format_vars)
            stdout, stderr = run_in(commands, shell)
            assert_equals(stdout, u"%s" % pathsep.join(_envpaths(root_dir) + [BASE_PATH, ]), stderr)
def test_activate_bad_env_keeps_existing_good_env(shell):
    # Attempting to activate a nonexistent env must not clobber the
    # currently-activated good env's PATH entries.
    shell_vars = _format_vars(shell)
    with TemporaryDirectory(prefix='envs', dir=dirname(__file__)) as envs:
        commands = (shell_vars['command_setup'] + """
        {source} {syspath}{cmd_path}activate "{env_dirs[0]}" {nul}
        {source} "{syspath}{cmd_path}activate" "{env_dirs[2]}"
        {printpath}
        """).format(envs=envs, env_dirs=gen_test_env_paths(envs, shell), **shell_vars)
        stdout, stderr = run_in(commands, shell)
        assert_in(pathsep.join(_envpaths(envs, 'test1')), shells[shell]["path_from"](stdout))
def test_activate_bad_env_keeps_existing_good_env():
    # A failed activation should leave the previously activated env
    # (plus the base PATH) untouched.
    for shell in shells:
        with TemporaryDirectory(prefix='envs', dir=dirname(__file__)) as envs:
            commands = (command_setup + """
            {source} {syspath}{binpath}activate "{env_dirs[0]}" {nul}
            {source} {syspath}{binpath}activate "{env_dirs[2]}"
            {printpath}
            """).format(envs=envs, env_dirs=gen_test_env_paths(envs), **_format_vars)
            stdout, stderr = run_in(commands, shell)
            assert_equals(stdout, pathsep.join(_envpaths(envs, 'test1')) + pathsep + BASE_PATH, stderr)
def test_activate_test1():
    # Activating env test1 prepends its paths to PATH and announces the
    # prepend on stderr.
    for shell in shells:
        with TemporaryDirectory(prefix='envs', dir=dirname(__file__)) as envs:
            commands = (command_setup + """
            {source} {syspath}{binpath}activate{shell_suffix} "{env_dirs[0]}"
            {printpath}
            """).format(envs=envs, env_dirs=gen_test_env_paths(envs), **_format_vars)
            stdout, stderr = run_in(commands, shell)
            assert_equals(stderr, u'prepending {envpaths} to PATH'\
                .format(envpaths=pathlist_to_str(_envpaths(envs, 'test1'))))
            assert_equals(stdout, pathsep.join(_envpaths(envs, 'test1') + [BASE_PATH, ]))
def _recalculate_derived(self):
    # Refresh all cached values derived from the interpreter description.
    self._site_paths = tuple(self._get_site_paths())
    environ_path = self._environ.get("PATH")
    if environ_path:
        # make sure the interpreter's exec dir is found first on PATH
        self._environ["PATH"] = pathsep.join([self._exec_dir] + environ_path.split(pathsep))
    self._long_desc = "%s version %s on %s in %s" % (self.name, ".".join(
        str(v) for v in self.version), self.platform, self.executable)
    self._short_desc = "%s %s" % (self.name, ".".join(
        str(v) for v in self.version))
def print_path(ctx, _, value):
    """\
    Prints the auto discovered plugin path.

    Packages that register an ``yang.plugins`` entry-point will be
    auto-detected.
    """
    # eager click option callback: do nothing unless the flag was given
    if not value or ctx.resilient_parsing:
        return
    click.echo(pathsep.join(expanded()))
    ctx.exit()
def test_activate_test1(shell):
    # Activating env test1 prepends its paths to PATH and announces the
    # prepend on stderr.
    shell_vars = _format_vars(shell)
    with TemporaryDirectory(prefix='envs', dir=dirname(__file__)) as envs:
        commands = (shell_vars['command_setup'] + """
        {source} "{syspath}{cmd_path}activate{shell_suffix}" "{env_dirs[0]}"
        {printpath}
        """).format(envs=envs, env_dirs=gen_test_env_paths(envs, shell), **shell_vars)
        stdout, stderr = run_in(commands, shell)
        assert_equals(stderr, u'prepending {envpaths} to PATH'\
            .format(envpaths=pathlist_to_str(_envpaths(envs, 'test1'), False)), shell)
        assert_in(pathsep.join(_envpaths(envs, 'test1')), shells[shell]["path_from"](stdout), shell)
def test_which_with_node_modules(self): driver = base.BaseDriver() # ensure that NODE_PATH is initially None driver.node_path = None driver.working_dir = mkdtemp(self) # initially should be empty, since no node_modules in either # directories that it should check with pretty_logging(stream=mocks.StringIO()) as s: self.assertIsNone(driver.which_with_node_modules()) # should not generate extra log messages. self.assertNotIn('will attempt', s.getvalue()) # having the NODE_PATH defined will result in such p1 = mkdtemp(self) p2 = mkdtemp(self) driver.node_path = pathsep.join([p1, p2]) with pretty_logging(stream=mocks.StringIO()) as s: self.assertIsNone(driver.which_with_node_modules()) # should not generate extra log messages, binary still not # assigned. self.assertNotIn('will attempt', s.getvalue()) driver.binary = 'dummy' with pretty_logging(stream=mocks.StringIO()) as s: self.assertIsNone(driver.which_with_node_modules()) # now the log should show what attempted. log = s.getvalue() self.assertIn( "'BaseDriver' instance will attempt to locate 'dummy' binary from " "its NODE_PATH of", log) self.assertIn(p1, log) self.assertIn(p2, log) self.assertIn("'BaseDriver' instance located 2 possible paths", log) # try again with working directory driver.node_path = None dwd_wd_nm = join(driver.working_dir, 'node_modules') os.mkdir(dwd_wd_nm) with pretty_logging(stream=mocks.StringIO()) as s: self.assertIsNone(driver.which_with_node_modules()) log = s.getvalue() # now the log should show what attempted. self.assertIn( "'BaseDriver' instance will attempt to locate 'dummy' binary from", log, ) self.assertIn(dwd_wd_nm, log) self.assertIn("located through the working directory", log) self.assertIn("'BaseDriver' instance located 1 possible paths", log)
def build(self):
    """Configure and build graal-nodejs, returning True when new build
    results were actually produced."""
    # snapshot result timestamps to detect whether anything was rebuilt
    pre_ts = GraalNodeJsBuildTask._get_newest_ts(self.subject.getResults(), fatalIfMissing=False)
    build_env = os.environ.copy()
    # the build requires the suite-provided python2 shim first on PATH
    _setEnvVar('PATH', '%s%s%s' % (join(_suite.mxDir, 'python2'), pathsep, build_env['PATH']), build_env)
    debug = ['--debug'] if self._debug_mode else []
    shared_library = ['--enable-shared-library'] if hasattr(self.args, 'sharedlibrary') and self.args.sharedlibrary else []
    newest_config_file_ts = GraalNodeJsBuildTask._get_newest_ts(_config_files, fatalIfMissing=True)
    newest_generated_config_file_ts = GraalNodeJsBuildTask._get_newest_ts(_generated_config_files, fatalIfMissing=False)
    # Lazily generate config files only if `configure` and `configure.py` are older than the files they generate.
    # If we don't do this, the `Makefile` always considers `config.gypi` out of date, triggering a second, unnecessary configure.
    lazy_generator = ['--lazy-generator'] if newest_generated_config_file_ts.isNewerThan(newest_config_file_ts) else []
    if _is_windows:
        processDevkitRoot(env=build_env)
        # NASM and NINJA libraries must also be resolvable on PATH
        _setEnvVar('PATH', pathsep.join([build_env['PATH']] + [mx.library(lib_name).get_path(True) for lib_name in ('NASM', 'NINJA')]), build_env)
        extra_flags = ['--ninja', '--dest-cpu=x64', '--without-etw', '--without-snapshot']
    else:
        extra_flags = []
    _mxrun(python_cmd() + [join(_suite.dir, 'configure'),
                           '--partly-static',
                           '--without-dtrace',
                           '--without-snapshot',
                           '--without-node-snapshot',
                           '--java-home', _java_home()
                           ] + debug + shared_library + lazy_generator + extra_flags,
           cwd=_suite.dir, verbose=True, env=build_env)
    if _is_windows:
        verbose = ['-v'] if mx.get_opts().verbose else []
        # The custom env is not used to resolve the location of the executable
        _mxrun([join(mx.library('NINJA').get_path(True), 'ninja.exe')] + verbose + ['-j%d' % self.parallelism, '-C', self._build_dir], env=build_env)
    else:
        verbose = 'V={}'.format('1' if mx.get_opts().verbose else '')
        _mxrun([mx.gmake_cmd(), '-j%d' % self.parallelism, verbose], cwd=_suite.dir, verbose=True, env=build_env)
    # put headers for native modules into out/headers
    _setEnvVar('HEADERS_ONLY', '1', build_env)
    out = None if mx.get_opts().verbose else open(os.devnull, 'w')
    _mxrun(python_cmd() + [join('tools', 'install.py'), 'install', join('out', 'headers'), sep], out=out, env=build_env)
    post_ts = GraalNodeJsBuildTask._get_newest_ts(self.subject.getResults(), fatalIfMissing=True)
    mx.logv('Newest time-stamp before building: {}\nNewest time-stamp after building: {}\nHas built? {}'.format(pre_ts, post_ts, post_ts.isNewerThan(pre_ts)))
    built = post_ts.isNewerThan(pre_ts)
    if built and _current_os == 'darwin':
        # make the JVM libraries resolvable relative to the node binary
        nodePath = join(self._build_dir, 'node')
        _mxrun(['install_name_tool', '-add_rpath', join(_java_home(), 'jre', 'lib'), '-add_rpath', join(_java_home(), 'lib'), nodePath], verbose=True, env=build_env)
    return built
def test_finalize_env_win32(self): sys.platform = 'win32' # when os.environ is empty or missing the required keys, the # values will be empty strings. os.environ = {} self.assertEqual(finalize_env({}), { 'PATH': '', 'PATHEXT': '', 'SYSTEMROOT': ''}) # should be identical with the keys copied os.environ['PATH'] = 'C:\\Windows' os.environ['PATHEXT'] = pathsep.join(('.com', '.exe', '.bat')) os.environ['SYSTEMROOT'] = 'C:\\Windows' self.assertEqual(finalize_env({}), os.environ)
def test_activate_bad_env_keeps_existing_good_env(shell):
    # Attempting to activate a nonexistent env must not clobber the
    # currently-activated good env's PATH entries.
    shell_vars = _format_vars(shell)
    with TemporaryDirectory(prefix='envs', dir=dirname(__file__)) as envs:
        commands = (shell_vars['command_setup'] + """
        {source} {syspath}{cmd_path}activate "{env_dirs[0]}" {nul}
        {source} "{syspath}{cmd_path}activate" "{env_dirs[2]}"
        {printpath}
        """).format(envs=envs, env_dirs=gen_test_env_paths(envs, shell), **shell_vars)
        stdout, stderr = run_in(commands, shell)
        assert_in(pathsep.join(_envpaths(envs, 'test1')),
                  shells[shell]["path_from"](stdout))
def inject_ansible_paths():
    # collect and inject ansible paths (roles and library) from entrypoints
    try:
        import ansible.constants as C
        import ansible
    except ImportError:
        log.error("Can't import ansible, check whether it's installed correctly.")
        sys.exit(1)
    if get_ansible_version() >= (1, 10):
        log.warn(
            "You are using an untested version %s of ansible. "
            "The latest tested version is 1.9.X. "
            "Any errors may be caused by that newer version." % ansible.__version__)
    extra_roles = []
    extra_library = []
    # ansible.constants exposes one *_PLUGIN_PATH constant per plugin type
    plugin_path_names = set(x for x in dir(C) if x.endswith('_PLUGIN_PATH'))
    extra_plugins = {}
    for entrypoint in pkg_resources.iter_entry_points(group='ansible_paths'):
        pathinfo = entrypoint.load()
        extra_roles.extend(pathinfo.get('roles', []))
        extra_library.extend(pathinfo.get('library', []))
        for key in pathinfo:
            plugin_path_name = 'DEFAULT_%s_PLUGIN_PATH' % key.upper()
            if plugin_path_name in plugin_path_names:
                extra_plugins.setdefault(plugin_path_name, []).extend(pathinfo[key])
    # guard against C.DEFAULT_*_PATH being None before joining
    roles = list(extra_roles)
    if C.DEFAULT_ROLES_PATH is not None:
        roles.append(C.DEFAULT_ROLES_PATH)
    if roles:
        C.DEFAULT_ROLES_PATH = pathsep.join(roles)
    library = list(extra_library)
    if C.DEFAULT_MODULE_PATH is not None:
        library.append(C.DEFAULT_MODULE_PATH)
    if library:
        C.DEFAULT_MODULE_PATH = pathsep.join(library)
    # prepend the collected plugin dirs to each matching constant
    for attr in extra_plugins:
        setattr(C, attr, pathsep.join([pathsep.join(extra_plugins[attr]), getattr(C, attr)]))
def test_finalize_env_win32(self): sys.platform = 'win32' # when os.environ is empty or missing the required keys, the # values will be empty strings. os.environ = {} self.assertEqual(finalize_env({}), { 'APPDATA': '', 'PATH': '', 'PATHEXT': '', 'SYSTEMROOT': ''}) # should be identical with the keys copied os.environ['APPDATA'] = 'C:\\Users\\Guest\\AppData\\Roaming' os.environ['PATH'] = 'C:\\Windows' os.environ['PATHEXT'] = pathsep.join(('.com', '.exe', '.bat')) os.environ['SYSTEMROOT'] = 'C:\\Windows' self.assertEqual(finalize_env({}), os.environ)
def test_activate_bad_env_keeps_existing_good_env():
    # A failed activation must leave the current env's PATH intact.
    for shell in shells:
        with TemporaryDirectory(prefix='envs', dir=dirname(__file__)) as envs:
            commands = (command_setup + """
            {source} {syspath}{binpath}activate "{env_dirs[0]}" {nul}
            {source} {syspath}{binpath}activate "{env_dirs[2]}"
            {printpath}
            """).format(envs=envs, env_dirs=gen_test_env_paths(envs), **_format_vars)
            stdout, stderr = run_in(commands, shell)
            assert_equals(
                stdout,
                pathsep.join(_envpaths(envs, 'test1')) + pathsep + BASE_PATH,
                stderr)
def test_found_win32(self):
    # which() on win32 resolves bare names through PATH + PATHEXT.
    sys.platform = 'win32'
    tempdir = os.environ['PATH'] = mkdtemp(self)
    os.environ['PATHEXT'] = pathsep.join(('.com', '.exe', '.bat'))
    f = join(tempdir, 'binary.exe')
    with open(f, 'w'):
        pass
    os.chmod(f, 0o777)
    self.assertEqual(which('binary'), f)
    self.assertEqual(which('binary.exe'), f)
    self.assertIsNone(which('binary.com'))
    # an explicit path= argument overrides the (now empty) PATH
    os.environ['PATH'] = ''
    self.assertEqual(which('binary', path=tempdir), f)
    self.assertEqual(which('binary.exe', path=tempdir), f)
    self.assertIsNone(which('binary.com', path=tempdir))
def test_activate_test1():
    # Activating env test1 prepends its paths to PATH and announces the
    # prepend on stderr.
    for shell in shells:
        with TemporaryDirectory(prefix='envs', dir=dirname(__file__)) as envs:
            commands = (command_setup + """
            {source} {syspath}{binpath}activate{shell_suffix} "{env_dirs[0]}"
            {printpath}
            """).format(envs=envs, env_dirs=gen_test_env_paths(envs), **_format_vars)
            stdout, stderr = run_in(commands, shell)
            assert_equals(stderr, u'prepending {envpaths} to PATH'\
                .format(envpaths=pathlist_to_str(_envpaths(envs, 'test1'))))
            assert_equals(
                stdout, pathsep.join(_envpaths(envs, 'test1') + [
                    BASE_PATH,
                ]))
def test_activate_root_env_from_other_env():
    # Switching to "root" from another env leaves only root env paths
    # plus the base PATH.
    for shell in shells:
        with TemporaryDirectory(prefix='envs', dir=dirname(__file__)) as envs:
            commands = (command_setup + """
            {source} {syspath}{binpath}activate "{env_dirs[0]}" {nul}
            {source} {syspath}{binpath}activate root
            {printpath}
            """).format(envs=envs, env_dirs=gen_test_env_paths(envs), **_format_vars)
            stdout, stderr = run_in(commands, shell)
            assert_equals(
                stdout, u"%s" % pathsep.join(_envpaths(root_dir) + [
                    BASE_PATH,
                ]), stderr)
def test_set_env_path_with_node_path_multiple_with_environ(self):
    # With several node_modules dirs on NODE_PATH, the first one that
    # provides the binary wins for env_path, and PATH gets prefixed
    # with that bin dir.
    tmp = mkdtemp(self)
    tmp1, bin_dir1, _ = self.fake_mgr_bin()
    tmp2, bin_dir2, _ = self.fake_mgr_bin()
    node_path = pathsep.join(
        join(d, 'node_modules') for d in (tmp, tmp1, tmp2))
    driver = cli.PackageManagerDriver(
        pkg_manager_bin='mgr', node_path=node_path)
    self.assertTrue(driver._set_env_path_with_node_modules())
    # First one. Whether the node modules loads correctly, that's
    # up to the nodejs circus.
    self.assertEqual(driver.env_path, bin_dir1)
    # ensure the kws generated correctly.
    env = driver._gen_call_kws()['env']
    self.assertEqual(env['NODE_PATH'], node_path)
    self.assertEqual(env['PATH'].split(pathsep)[0], bin_dir1)
def test_set_env_path_with_node_path_multiple_with_environ(self):
    # With several node_modules dirs on NODE_PATH, the first one that
    # provides the binary wins for env_path, and PATH gets prefixed
    # with that bin dir.
    tmp = mkdtemp(self)
    tmp1, bin_dir1 = self.fake_mgr_bin()
    tmp2, bin_dir2 = self.fake_mgr_bin()
    node_path = pathsep.join(
        join(d, 'node_modules') for d in (tmp, tmp1, tmp2))
    driver = cli.PackageManagerDriver(pkg_manager_bin='mgr',
                                      node_path=node_path)
    self.assertTrue(driver._set_env_path_with_node_modules())
    # First one. Whether the node modules loads correctly, that's
    # up to the nodejs circus.
    self.assertEqual(driver.env_path, bin_dir1)
    # ensure the kws generated correctly.
    env = driver._gen_call_kws()['env']
    self.assertEqual(env['NODE_PATH'], node_path)
    self.assertEqual(env['PATH'].split(pathsep)[0], bin_dir1)
def which_with_node_modules(self):
    """
    Which with node_path and node_modules
    """
    # no binary name configured -- nothing to locate
    if self.binary is None:
        return None
    # first, log down the pedantic things...
    if isdir(self.join_cwd(NODE_MODULES)):
        logger.debug(
            "'%s' instance will attempt to locate '%s' binary from "
            "%s%s%s%s%s, located through the working directory",
            self.__class__.__name__, self.binary, self.join_cwd(),
            sep, NODE_MODULES, sep, NODE_MODULES_BIN,
        )
    if self.node_path:
        logger.debug(
            "'%s' instance will attempt to locate '%s' binary from "
            "its %s of %s",
            self.__class__.__name__, self.binary, NODE_PATH, self.node_path,
        )
    paths = self.find_node_modules_basedir()
    # search each candidate's node_modules/.bin directory via which()
    whichpaths = pathsep.join(join(p, NODE_MODULES_BIN) for p in paths)
    if paths:
        logger.debug(
            "'%s' instance located %d possible paths to the '%s' binary, "
            "which are %s",
            self.__class__.__name__, len(paths), self.binary, whichpaths,
        )
    return which(self.binary, path=whichpaths)
def test_found_win32(self):
    # which() on win32 resolves bare names through PATH + PATHEXT, and
    # accepts an already-complete file path as-is.
    sys.platform = 'win32'
    tempdir = os.environ['PATH'] = mkdtemp(self)
    os.environ['PATHEXT'] = pathsep.join(('.com', '.exe', '.bat'))
    f = join(tempdir, 'binary.exe')
    with open(f, 'w'):
        pass
    os.chmod(f, 0o777)
    self.assertEqual(which('binary'), f)
    self.assertEqual(which('binary.exe'), f)
    self.assertEqual(which(f), f)
    self.assertIsNone(which('binary.com'))
    # an explicit path= argument overrides the (now empty) PATH
    os.environ['PATH'] = ''
    self.assertEqual(which('binary', path=tempdir), f)
    self.assertEqual(which('binary.exe', path=tempdir), f)
    self.assertEqual(which(f, path=tempdir), f)
    self.assertIsNone(which('binary.com', path=tempdir))
def _gen_call_kws(self, **env):
    # Build the subprocess keyword arguments (env and cwd) for calls
    # made by this driver instance.
    kw = {}
    if self.node_path is not None:
        env[NODE_PATH] = self.node_path
    if self.env_path is not None:
        # Initial assignment with check
        _check_isdir_assign_key(env, 'PATH', self.env_path)
        # then append the rest of it
        env['PATH'] = pathsep.join(
            [env.get('PATH', ''), os.environ.get('PATH', '')])
    if self.working_dir:
        _check_isdir_assign_key(
            kw, 'cwd', self.working_dir,
            error_msg="current working directory left as default")
    kw['env'] = finalize_env(env)
    return kw
def test_activate_root(shell):
    # Activating root puts the root env's paths on PATH; deactivating
    # afterwards restores the base PATH.
    shell_vars = _format_vars(shell)
    with TemporaryDirectory(prefix='envs', dir=dirname(__file__)) as envs:
        commands = (shell_vars['command_setup'] + """
        {source} "{syspath}{cmd_path}activate" root
        {printpath}
        """).format(envs=envs, **shell_vars)
        stdout, stderr = run_in(commands, shell)
        assert_in(shells[shell]["path_to"](pathsep.join(_envpaths(root_dir))), stdout)

        commands = (shell_vars['command_setup'] + """
        {source} "{syspath}{cmd_path}activate" root
        {source} "{syspath}{cmd_path}deactivate"
        {printpath}
        """).format(envs=envs, **shell_vars)
        stdout, stderr = run_in(commands, shell)
        assert_equals(stdout, u"%s" % shell_vars['base_path'], stderr)
def export_path(ctx, _, value):
    """\
    Prints an export shell statement with the auto discovered plugin path.

    This may be used by shell script to configure ``PYANG_PLUGINPATH``
    environment variable. Example:

    ::

        eval $(pyangext --export-path)
    """
    # eager click option callback: do nothing unless the flag was given
    if not value or ctx.resilient_parsing:
        return
    # shell-quote the joined path so eval handles spaces safely
    click.echo(
        'export PYANG_PLUGINPATH=' + shlex_quote(pathsep.join(expanded())))
    ctx.exit()
def run(self):
    """Call java."""
    from os import environ
    from os.path import pathsep
    jar = self.get_kwarg('jar', (Path, str), noNone=True)
    if self.properties:
        # -Dname=value system properties
        sysprop = ['-D%s=%s' % x for x in self.properties]
    else:
        sysprop = ()
    cmd = (self.java_prog,) + tuple(sysprop) + \
        ('-jar', str(jar),) + \
        tuple([str(s) for s in self.args])
    env = environ.copy()
    if self.classpath:
        env['CLASSPATH'] = pathsep.join(self.classpath)
    # NOTE(review): `env` is built above but never handed to Subcommand --
    # the CLASSPATH setting therefore has no effect unless Subcommand
    # implicitly reads the same os.environ; confirm and pass env through
    # if it does not.
    proc = Subcommand(cmd)
    if proc.returncode:
        raise Error(
            self,
            '%s failed with returncode %d' % (
                self.__class__.__name__.lower(), proc.returncode))
def get_git_environ_adjusted(env=None): """ Replaces GIT_DIR and GIT_WORK_TREE with absolute paths if relative path and defined """ # if env set copy else get os environment git_env = env.copy() if env else os.environ.copy() if GitRunner._GIT_PATH: git_env['PATH'] = pathsep.join([GitRunner._GIT_PATH, git_env['PATH']]) \ if 'PATH' in git_env \ else GitRunner._GIT_PATH for varstring in ['GIT_DIR', 'GIT_WORK_TREE']: var = git_env.get(varstring) if var: # if env variable set if not isabs(var): # and it's a relative path git_env[varstring] = abspath(var) # to absolute path lgr.log(9, "Updated %s to %s", varstring, git_env[varstring]) if 'GIT_SSH_COMMAND' not in git_env: git_env['GIT_SSH_COMMAND'] = GIT_SSH_COMMAND return git_env
def inject_ansible_paths():
    """Collect ansible paths (roles, library, plugins) registered via
    ``ansible_paths`` entrypoints and inject them into ansible.constants."""
    try:
        import ansible.constants as C
    except ImportError:
        log.error("Can't import ansible, check whether it's installed correctly.")
        sys.exit(1)
    extra_roles = []
    extra_library = []
    # ansible.constants exposes one *_PLUGIN_PATH constant per plugin type
    plugin_path_names = set(x for x in dir(C) if x.endswith('_PLUGIN_PATH'))
    extra_plugins = {}
    for entrypoint in pkg_resources.iter_entry_points(group='ansible_paths'):
        pathinfo = entrypoint.load()
        extra_roles.extend(pathinfo.get('roles', []))
        extra_library.extend(pathinfo.get('library', []))
        for key in pathinfo:
            plugin_path_name = 'DEFAULT_%s_PLUGIN_PATH' % key.upper()
            if plugin_path_name in plugin_path_names:
                extra_plugins.setdefault(plugin_path_name, []).extend(pathinfo[key])
    # FIX: guard against C.DEFAULT_*_PATH being None (pathsep.join would
    # raise TypeError) and avoid emitting a leading separator when no
    # extras were registered.
    roles = list(extra_roles)
    if C.DEFAULT_ROLES_PATH is not None:
        roles.append(C.DEFAULT_ROLES_PATH)
    if roles:
        C.DEFAULT_ROLES_PATH = pathsep.join(roles)
    library = list(extra_library)
    if C.DEFAULT_MODULE_PATH is not None:
        library.append(C.DEFAULT_MODULE_PATH)
    if library:
        C.DEFAULT_MODULE_PATH = pathsep.join(library)
    # prepend the collected plugin dirs to each matching constant
    for attr in extra_plugins:
        setattr(C, attr, pathsep.join([pathsep.join(extra_plugins[attr]), getattr(C, attr)]))
"mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython2", "version": "2.7.6" } }, "nbformat": 4, "nbformat_minor": 0 } """.replace('IVY_FILENAME', repr(ivy_filename)) # if X.bmc.ipynb exists, open it, otherwise create a new X.ivy.bmc.ipynb notebook_filename = ivy_filename[:-4] + '.bmc.ipynb' if os.path.isfile(notebook_filename): print "Opening existing notebook: {}".format(notebook_filename) else: notebook_filename = ivy_filename + '.bmc.ipynb' print "Creating new notebook: {}".format(notebook_filename) open(notebook_filename, 'w').write(notebook_source) d = dirname(__file__) os.environ['PYTHONPATH'] = pathsep.join([ os.environ['PYTHONPATH'], d, join(d, pardir, 'src', 'ivy'), ]) sys.argv = ['ipython', 'notebook', notebook_filename] sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) sys.exit(IPython.start_ipython())
if 'win' in platform: paths = [join(env_root, env_name).rstrip("\\"), join(env_root, env_name, 'Scripts'), join(env_root, env_name, 'Library', 'bin'), ] else: paths = [join(env_root, env_name).rstrip("/"), join(env_root, env_name, 'bin'), ] return paths PYTHONPATH = os.path.dirname(os.path.dirname(__file__)) BASE_PATH = os.getenv("PATH") # Make sure the subprocess activate calls this python syspath = pathsep.join(_envpaths(root_dir)) echo = "echo" escape_curly = lambda x: x.replace("{", "{{").replace("}", "}}") if platform.startswith("win"): shells = ['cmd.exe'] ps_var = "PROMPT" var_format = "%{var}%" binpath = "\\Scripts\\" # mind the trailing slash. source_setup = "call" nul = '1>NUL 2>&1' set_var = 'set ' shell_suffix = ".bat" printps1 = '{echo} {var}'.format(echo=echo if os.getenv(ps_var) else "echo.", var=var_format.format(var=ps_var))
def karma_environ(driver):
    """Build the environ for a karma run: the GUI environ keys plus a
    NODE_PATH assembled from the driver's node_modules base directories."""
    env = extract_gui_environ_keys()
    env['NODE_PATH'] = pathsep.join(driver.find_node_modules_basedir())
    return env
def unique_paths(key):
    """Re-export the path list variable *key* with duplicates removed.

    Order is preserved: only the first occurrence of each path survives.
    (Replaces the obscure ``x in seen or seen.add(x)`` side-effect-in-
    generator idiom with an explicit loop — identical behavior.)
    """
    seen = set()
    deduped = []
    for path in get_paths(key):
        if path not in seen:
            seen.add(path)
            deduped.append(path)
    command_export(key, pathsep.join(deduped))
def sync(self):
    # Flush the pending in-memory entries into the dbm store, then persist.
    # NOTE(review): `iteritems` implies Python 2. Each value is presumably a
    # list of path strings — they are joined with os.pathsep and stored as
    # UTF-8 encoded bytes; confirm the dbm backend expects bytes values.
    for key, value in self._flush.iteritems():
        self.dbm[key] = pathsep.join(value).encode('utf-8')
    # Write the dbm changes through to disk.
    self.dbm.sync()
# this script is used on windows to wrap shortcuts so that they are executed within an environment # It only sets the appropriate prefix PATH entries - it does not actually activate environments import os import sys import subprocess from os.path import join, pathsep from menuinst.knownfolders import FOLDERID, get_folder_path, PathNotFoundException # call as: python cwp.py PREFIX ARGs... prefix = sys.argv[1] args = sys.argv[2:] new_paths = pathsep.join([prefix, join(prefix, "Library", "mingw-w64", "bin"), join(prefix, "Library", "usr", "bin"), join(prefix, "Library", "bin"), join(prefix, "Scripts")]) env = os.environ.copy() env['PATH'] = new_paths + pathsep + env['PATH'] env['CONDA_PREFIX'] = prefix documents_folder, exception = get_folder_path(FOLDERID.Documents) if exception: documents_folder, exception = get_folder_path(FOLDERID.PublicDocuments) if not exception: os.chdir(documents_folder) sys.exit(subprocess.call(args, env=env))
# Set up the environment for various extension modules. # Most variables are used to make this python installation # relocatable. # # Note: we use os.putenv, because we do not want our variable settings to show up # in os.environ. # # # XDG Base Directory Specification # See: http://freedesktop.org/wiki/Standards/basedir-spec?action=show # # $XDG_DATA_DIRS defines the preference-ordered set of base directories to # search for data files in addition to the $XDG_DATA_HOME base directory. putenv("XDG_DATA_DIRS", pathsep.join((normpath(join(dir, pardir, "share")), normpath(join(dir, pardir, pardir, pardir, "share"))))) # $XDG_CONFIG_DIRS defines the preference-ordered set of base directories to # search for configuration files in addition to the $XDG_CONFIG_HOME base # directory. putenv("XDG_CONFIG_DIRS", pathsep.join((normpath(join(dir, pardir, "etc")), normpath(join(dir, pardir, pardir, pardir, "etc"))))) # GLIB environment variables. See http://library.gnome.org/devel/glib/stable/glib-running.html # LIBCHARSET_ALIAS_DIR. Allows to specify a nonstandard location for the # charset.aliases file that is used by the character set conversion routines. # The default location is the libdir specified at compilation time. putenv("LIBCHARSET_ALIAS_DIR", normpath(join(dir, pardir, "lib"))) # TZDIR. Allows to specify a nonstandard location for the timezone data files # that are used by the #GDateTime API. The default location is under
def inspect(args):
    """launch a given program under the Inspector

    Run Maxine under the Inspector. The arguments accepted by this command
    are those accepted by the 'mx vm' command plus the Inspector specific
    options. To debug a program in the Inspector, simply replace 'vm' on the
    command line that launches the program with 'inspect'.

    Use "mx inspect --help" to see what the Inspector options are. These
    options must be specified with a '--' prefix so that they can be
    distinguished from the VM options.

    The inspect command also accepts the same system property related options
    as the 'image' command except that a '--' prefix must be used (e.g.
    '--os Darwin --bits 32'). Use "mx help image" for more detail.

    Use "mx vm -help" to see what the VM options are."""
    # Classes loaded by the inspected VM are saved here so the Inspector
    # process can put them on its own classpath later.
    saveClassDir = join(_vmdir, 'inspected_classes')
    maxvmOptions = os.getenv('MAXVM_OPTIONS', '').split()
    vmArgs = ['-XX:SaveClassDir=' + saveClassDir, '-XX:+TrapOnError'] + maxvmOptions
    insArgs = ['-vmdir=' + _vmdir]
    if not isdir(saveClassDir):
        os.makedirs(saveClassDir)
    sysProps = []
    sysProps += ['-Xbootclasspath/a:' + mx.distribution('GRAAL').path]
    insCP = []
    # check_cwd_change presumably strips a cwd-changing option and returns
    # (cwd, remaining_args) — confirm against its definition.
    cwdArgs = check_cwd_change(args)
    cwd = cwdArgs[0]
    args = cwdArgs[1]
    # Manual index-based scan: several options consume the following
    # argument, so the loop advances `i` by a variable amount.
    i = 0
    remote = False
    while i < len(args):
        arg = args[i]
        if arg.startswith('-XX:LogFile='):
            # Mirror the VM log file name for the Inspector's tele layer.
            logFile = arg.split('=', 1)[1]
            vmArgs += [arg]
            os.environ['TELE_LOG_FILE'] = 'tele-' + logFile
        elif arg in ['-cp', '-classpath']:
            # Classpath goes to both the VM and the Inspector (with project
            # names expanded for the latter); consumes the next argument.
            vmArgs += [arg, args[i + 1]]
            insCP += [mx.expand_project_in_class_path_arg(args[i + 1])]
            i += 1
        elif arg == '-jar':
            # The jar is also needed on the Inspector classpath.
            vmArgs += ['-jar', args[i + 1]]
            insCP += [args[i + 1]]
            i += 1
        elif arg == '--remote':
            remote = True
        elif arg in ['--platform', '--cpu', '--isa', '--os', '--endianness', '--bits', '--page', '--nsig']:
            # Image-style platform options become -Dmax.<name>=<value>
            # system properties; each consumes the next argument.
            name = arg.lstrip('-')
            i += 1
            value = args[i]
            sysProps += ['-Dmax.' + name + '=' + value]
        elif arg.startswith('--cp='):
            insCP += [arg[len('--cp='):]]
        elif arg.startswith('--'):
            # chomp leading '-': '--foo' becomes the Inspector option '-foo'
            insArgs += [arg[1:]]
        elif arg.startswith('-XX:SaveClassDir='):
            # Caller overrides the default save directory; ensure it exists.
            vmArgs += [arg]
            saveClassDir = arg.split('=', 1)[1]
            if not isdir(saveClassDir):
                os.makedirs(saveClassDir)
        elif arg.startswith('-'):
            vmArgs += [arg]
        else:
            # This is the main class argument; copy it and any following
            # arguments to the VM verbatim
            vmArgs += args[i:]
            break
        i += 1
    insCP += [saveClassDir]
    insCP = pathsep.join(insCP)
    insArgs += ['-cp=' + insCP]
    mx.expand_project_in_args(vmArgs)
    # The Inspector JVM gets the system properties, the combined classpath,
    # and the full inspected-VM command line packed into a single -a= option.
    cmd = mx.java().format_cmd(sysProps + ['-cp', sanitized_classpath() + pathsep + insCP, 'com.sun.max.ins.MaxineInspector'] + insArgs + ['-a=' + ' '.join(vmArgs)])
    if mx.get_os() == 'darwin' and not remote:
        # The -E option propagates the environment variables into the sudo process
        mx.run(['sudo', '-E', '-p', 'Debugging is a privileged operation on Mac OS X.\nPlease enter your "sudo" password:'] + cmd, cwd=cwd)
    else:
        mx.run(cmd, cwd=cwd)
def test_multiple_values(self):
    """A pathsep-joined argument is split back into its component list."""
    ns = Namespace()
    action = StorePathSepDelimitedList('', dest='basic')
    joined = pathsep.join(['file1', 'file2'])
    action(None, ns, [joined])
    self.assertEqual(ns.basic, ['file1', 'file2'])
def _set_env_path_with_node_modules(self):
    """
    Attempt to locate and set the paths to the binary with the working
    directory defined for this instance.

    Returns True if the binary was found (either already on PATH, or under
    a node_modules/.bin directory, in which case ``self.env_path`` is set
    to its directory); False if it could not be located.

    Raises ValueError if ``self.binary`` is undefined.
    """
    # Fully-qualified class name, used only for log/error messages.
    modcls_name = ':'.join(
        (self.__class__.__module__, self.__class__.__name__))
    if self.binary is None:
        raise ValueError("binary undefined for '%s' instance" % modcls_name)
    logger.debug(
        "locating '%s' node binary for %s instance...",
        self.binary, modcls_name,
    )
    # If the binary already resolves through the instance's default lookup,
    # leave the PATH handling alone.
    default = self.which()
    if default is not None:
        logger.debug(
            "found '%s'; "
            "not modifying PATH environment variable in instance of '%s'.",
            realpath(default), modcls_name)
        return True
    # Otherwise pick the search base: an explicit NODE_PATH if set,
    # falling back to the working directory's node_modules.
    node_path = self.node_path
    if node_path:
        logger.debug(
            "environment variable '%s' defined '%s'; "
            "their bin directories will be searched.",
            NODE_PATH, node_path,
        )
    else:
        node_path = self.join_cwd('node_modules')
        logger.debug(
            "environment variable '%s' undefined; using instance's "
            "working directory's node_modules (%s) as base directory for "
            "finding node binaries.", NODE_PATH, node_path,
        )
    # Search every <base>/.bin directory derived from the (possibly
    # pathsep-delimited) node_path for the binary.
    target = which(self.binary, path=pathsep.join(
        join(p, '.bin') for p in node_path.split(pathsep)))
    if target:
        # Only setting the path specific for the binary; side effect
        # will be whoever else borrowing the _exec in here might not
        # get the binary they want. That's why it's private.
        self.env_path = dirname(target)
        logger.debug(
            "located '%s' binary at '%s'; setting PATH environment "
            "variable for '%s' instance.", self.binary, self.env_path,
            modcls_name)
        return True
    else:
        logger.debug(
            "Unable to locate '%s'; not modifying PATH environment "
            "variable for instance of '%s'.", self.binary, modcls_name)
        return False
}, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython2", "version": "2.7.6" } }, "nbformat": 4, "nbformat_minor": 0 } """.replace( "IVY_FILENAME", repr(ivy_filename) ) # if X.ipynb exists, open it, otherwise create a new X.ivy.ipynb notebook_filename = ivy_filename[:-4] + ".ipynb" if os.path.isfile(notebook_filename): print "Opening existing notebook: {}".format(notebook_filename) else: notebook_filename = ivy_filename + ".ipynb" print "Creating new notebook: {}".format(notebook_filename) open(notebook_filename, "w").write(notebook_source) d = dirname(__file__) os.environ["PYTHONPATH"] = pathsep.join([os.environ["PYTHONPATH"], d, join(d, pardir, "src", "ivy")]) sys.argv = ["ipython", "notebook", notebook_filename] sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0]) sys.exit(IPython.start_ipython())