def test_getsitepackages(self):
    """Check site.getsitepackages() for a fake prefix on each platform.

    Mutates the global ``site.PREFIXES``; expected results depend on the
    platform the test runs on.  Uses ``assertEqual`` throughout: the
    ``assertEquals`` alias is deprecated and removed in Python 3.12.
    """
    site.PREFIXES = ["xoxo"]
    dirs = site.getsitepackages()
    if sys.platform in ("os2emx", "riscos"):
        # single Lib/site-packages entry on these platforms
        self.assertEqual(len(dirs), 1)
        wanted = os.path.join("xoxo", "Lib", "site-packages")
        self.assertEqual(dirs[0], wanted)
    elif os.sep == "/":
        # POSIX: versioned site-packages plus legacy site-python
        self.assertEqual(len(dirs), 2)
        wanted = os.path.join("xoxo", "lib", "python" + sys.version[:3], "site-packages")
        self.assertEqual(dirs[0], wanted)
        wanted = os.path.join("xoxo", "lib", "site-python")
        self.assertEqual(dirs[1], wanted)
    else:
        # Windows-style: the prefix itself plus lib/site-packages
        self.assertEqual(len(dirs), 2)
        self.assertEqual(dirs[0], "xoxo")
        wanted = os.path.join("xoxo", "lib", "site-packages")
        self.assertEqual(dirs[1], wanted)
    # let's try the specific Apple location
    if sys.platform == "darwin" and sysconfig.get_config_var("PYTHONFRAMEWORK"):
        site.PREFIXES = ["Python.framework"]
        dirs = site.getsitepackages()
        self.assertEqual(len(dirs), 3)
        wanted = os.path.join("/Library", "Python", sys.version[:3], "site-packages")
        self.assertEqual(dirs[2], wanted)
def main():
    # Entry point (Python 2 syntax).  Picks an image root directory from the
    # first CLI argument ("b" -> bad, "c" -> crossvalidate, anything else ->
    # tentative) and processes every non-"canonical" png/jpg/jpeg under it.
    mode = sys.argv[1]
    print mode
    rootdir = "/Users/kgeorge/Dropbox/cars/tentative"
    if mode == "b":
        rootdir = "/Users/kgeorge/Dropbox/cars/bad"
    elif mode == "c":
        rootdir = "/Users/kgeorge/Dropbox/cars/crossvalidate"
    builddir = os.path.join(rootdir, "build")
    print site.getsitepackages()
    for root, dirs, files in os.walk(rootdir):
        (h, t) = os.path.split(root)
        if t.endswith("_delme"):
            # skip directories explicitly marked for deletion
            continue
        for f in files:
            (p, e) = os.path.splitext(f)
            if ((e.lower() == ".png") or (e.lower() == ".jpg") or (e.lower() == ".jpeg")) and not p.endswith(
                "canonical"
            ):
                try:
                    processImage2(builddir, root, f)
                except AttributeError, e:
                    # per-image best effort: log the failure and continue
                    print e
                    pass
def find_DST():
    """Return the install destination for this package.

    Selects a site-packages directory by platform (index 1 on Windows,
    index 0 on Darwin/Linux) and appends PKG_NAME.  Returns None on any
    other platform, matching the original implicit fall-through.
    """
    index_by_platform = {"Windows": 1, "Darwin": 0, "Linux": 0}
    idx = index_by_platform.get(SYS_NAME)
    if idx is None:
        return None
    return os.path.join(site.getsitepackages()[idx], PKG_NAME)
def build(buildargs=['-y', '-windowed', '--onedir', '--clean', '--icon="icons/desuratools.ico"', '--noupx', '--version-file=versioninfo.txt'], package=True): pyinstaller = os.path.join(site.getsitepackages()[0], "Scripts", "pyinstaller-script.py") dependencies = ['PySide', 'PIL', 'win32api', 'win32gui', 'win32ui', 'win32con', 'requests'] imageformats = os.path.join(site.getsitepackages()[1], "PySide", "plugins", "imageformats") buildargs.insert(0, 'desuratools.py') buildargs.insert(0, pyinstaller) buildargs.insert(0, "python") dist_folder = os.path.join(os.getcwd(), "dist") output_folder = os.path.join(os.getcwd(), "dist", "desuratools") if not os.path.exists(pyinstaller): raise IOError("PyInstaller is required to build for windows") print "PyInstaller check passed" for module in dependencies: try: imp.find_module(module) except ImportError: raise ImportError("Dependency {0} is required".format(module)) print "Dependency check passed" print "Building DesuraTools" subprocess.call(' '.join(buildargs)) print "Copying imageformat plugins" imageformats_dist = os.path.join(output_folder, "imageformats") distutils.dir_util.copy_tree(imageformats, imageformats_dist, verbose=1) print "Copying icon" images_dist = os.path.join(output_folder, "desuratools_256.png") shutil.copyfile("desuratools_256.png", images_dist) if package: package_app(dist_folder, output_folder)
def test_s_option(self):
    """Verify the -s flag and PYTHONNOUSERSITE/PYTHONUSERBASE env vars
    control whether the user site directory appears on sys.path.

    Each child exits with the truthiness of the membership test, so
    rc == 1 means "usersite was on sys.path".
    """
    usersite = site.USER_SITE
    self.assertIn(usersite, sys.path)
    # default interpreter: user site must be on sys.path
    env = os.environ.copy()
    rc = subprocess.call([sys.executable, '-c', 'import sys; sys.exit(%r in sys.path)' % usersite], env=env)
    self.assertEqual(rc, 1)
    # with -s the user site is suppressed -- unless it coincides with a
    # global site-packages directory, in which case it stays on the path
    env = os.environ.copy()
    rc = subprocess.call([sys.executable, '-s', '-c', 'import sys; sys.exit(%r in sys.path)' % usersite], env=env)
    if usersite == site.getsitepackages()[0]:
        self.assertEqual(rc, 1)
    else:
        self.assertEqual(rc, 0)
    # PYTHONNOUSERSITE=1 behaves like -s
    env = os.environ.copy()
    env["PYTHONNOUSERSITE"] = "1"
    rc = subprocess.call([sys.executable, '-c', 'import sys; sys.exit(%r in sys.path)' % usersite], env=env)
    if usersite == site.getsitepackages()[0]:
        self.assertEqual(rc, 1)
    else:
        self.assertEqual(rc, 0)
    # PYTHONUSERBASE redirects USER_BASE
    env = os.environ.copy()
    env["PYTHONUSERBASE"] = "/tmp"
    rc = subprocess.call([sys.executable, '-c', 'import sys, site; sys.exit(site.USER_BASE.startswith("/tmp"))'], env=env)
    self.assertEqual(rc, 1)
def test_getsitepackages(self):
    """Check site.getsitepackages() for a fake prefix; expectations are
    platform dependent.  Mutates the global site.PREFIXES."""
    site.PREFIXES = ['xoxo']
    dirs = site.getsitepackages()
    if (sys.platform == "darwin" and
        sysconfig.get_config_var("PYTHONFRAMEWORK")):
        # OS X framework builds
        site.PREFIXES = ['Python.framework']
        dirs = site.getsitepackages()
        self.assertEqual(len(dirs), 2)
        wanted = os.path.join('/Library',
                              sysconfig.get_config_var("PYTHONFRAMEWORK"),
                              '%d.%d' % sys.version_info[:2],
                              'site-packages')
        self.assertEqual(dirs[1], wanted)
    elif os.sep == '/':
        # OS X non-framwework builds, Linux, FreeBSD, etc
        self.assertEqual(len(dirs), 1)
        wanted = os.path.join('xoxo', 'lib',
                              'python%d.%d' % sys.version_info[:2],
                              'site-packages')
        self.assertEqual(dirs[0], wanted)
    else:
        # other platforms
        self.assertEqual(len(dirs), 2)
        self.assertEqual(dirs[0], 'xoxo')
        wanted = os.path.join('xoxo', 'lib', 'site-packages')
        self.assertEqual(dirs[1], wanted)
def test_getsitepackages(self):
    """Check site.getsitepackages() for a fake prefix; one branch per
    platform family.  Mutates the global site.PREFIXES."""
    site.PREFIXES = ["xoxo"]
    dirs = site.getsitepackages()
    if sys.platform in ("os2emx", "riscos"):
        self.assertEqual(len(dirs), 1)
        wanted = os.path.join("xoxo", "Lib", "site-packages")
        self.assertEqual(dirs[0], wanted)
    elif sys.platform == "darwin" and sysconfig.get_config_var("PYTHONFRAMEWORK"):
        # OS X framework builds
        site.PREFIXES = ["Python.framework"]
        dirs = site.getsitepackages()
        self.assertEqual(len(dirs), 3)
        wanted = os.path.join(
            "/Library", sysconfig.get_config_var("PYTHONFRAMEWORK"), sys.version[:3], "site-packages"
        )
        self.assertEqual(dirs[2], wanted)
    elif os.sep == "/":
        # OS X non-framwework builds, Linux, FreeBSD, etc
        # NOTE: expects 3 entries but only the first two are asserted here.
        self.assertEqual(len(dirs), 3)
        wanted = os.path.join("xoxo", "lib", "python" + sys.version[:3], "site-packages")
        self.assertEqual(dirs[0], wanted)
        wanted = os.path.join("xoxo", "lib", "site-python")
        self.assertEqual(dirs[1], wanted)
    else:
        # other platforms
        self.assertEqual(len(dirs), 2)
        self.assertEqual(dirs[0], "xoxo")
        wanted = os.path.join("xoxo", "lib", "site-packages")
        self.assertEqual(dirs[1], wanted)
def test_getsitepackages(self):
    """Check site.getsitepackages() for a fake prefix, then the special
    Apple framework location.  Mutates the global site.PREFIXES."""
    site.PREFIXES = ['xoxo']
    dirs = site.getsitepackages()
    if sys.platform in ('os2emx', 'riscos'):
        self.assertEqual(len(dirs), 1)
        wanted = os.path.join('xoxo', 'Lib', 'site-packages')
        self.assertEqual(dirs[0], wanted)
    elif os.sep == '/':
        # POSIX: versioned site-packages plus legacy site-python
        self.assertEqual(len(dirs), 2)
        wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3], 'site-packages')
        self.assertEqual(dirs[0], wanted)
        wanted = os.path.join('xoxo', 'lib', 'site-python')
        self.assertEqual(dirs[1], wanted)
    else:
        self.assertEqual(len(dirs), 2)
        self.assertEqual(dirs[0], 'xoxo')
        wanted = os.path.join('xoxo', 'lib', 'site-packages')
        self.assertEqual(dirs[1], wanted)
    # let's try the specific Apple location
    if (sys.platform == "darwin" and
        sysconfig.get_config_var("PYTHONFRAMEWORK")):
        site.PREFIXES = ['Python.framework']
        dirs = site.getsitepackages()
        self.assertEqual(len(dirs), 3)
        wanted = os.path.join('/Library', 'Python', sys.version[:3], 'site-packages')
        self.assertEqual(dirs[2], wanted)
def plotgraph(plotvar, divid, omc, resultfile):
    """Build an HTML fragment that plots simulation variables with Dygraph.

    :param plotvar: comma-separated variable names (commas inside [] / ()
        are treated as part of a single name)
    :param divid: id of the <div> the plot is rendered into
    :param omc: OpenModelica session object (sendExpression interface)
    :param resultfile: simulation result file path, or None
    :return: HTML string (plot markup, an error <p>, or a "no result" <p>)
    """
    if (resultfile is not None):
        # ensure dygraph-combined.js is available next to the notebook;
        # copy it from the installed openmodelica_kernel package if missing
        checkdygraph = os.path.join(os.getcwd(), 'dygraph-combined.js')
        if not os.path.exists(checkdygraph):
            if (sys.platform == 'win32'):
                try:
                    # NOTE(review): assumes index [1] is the right
                    # site-packages dir on Windows -- confirm.
                    sitepath = site.getsitepackages()[1]
                    dygraphfile = os.path.join(sitepath, 'openmodelica_kernel', 'dygraph-combined.js').replace('\\', '/')
                    shutil.copy2(dygraphfile, os.getcwd())
                    # print 'copied file'
                except Exception as e:
                    print(e)
            else:
                try:
                    sitepath = site.getsitepackages()[0]
                    dygraphfile = os.path.join(sitepath, 'openmodelica_kernel', 'dygraph-combined.js').replace('\\', '/')
                    shutil.copy2(dygraphfile, os.getcwd())
                    # print 'copied file'
                except Exception as e:
                    print(e)
        try:
            divheader = " ".join(['<div id=' + str(divid) + '>', '</div>'])
            # read {time, <plotvar>} columns from the result file
            readResult = omc.sendExpression("readSimulationResult(\"" + resultfile + "\",{time," + plotvar + "})")
            omc.sendExpression("closeSimulationResultFile()")
            plotlabels = ['Time']
            # split plotvar on commas, except commas nested in [] or ()
            # NOTE(review): non-raw string relies on '\s' passing through as
            # a regex escape -- confirm (raw string would be conventional).
            exp = '(\s?,\s?)(?=[^\[]*\])|(\s?,\s?)(?=[^\(]*\))'
            # print 'inside_plot1'
            subexp = re.sub(exp, '$#', plotvar)
            plotvalsplit = subexp.split(',')
            # print plotvalsplit
            for z in range(len(plotvalsplit)):
                # restore the protected in-name commas
                val = plotvalsplit[z].replace('$#', ',')
                plotlabels.append(val)
            plotlabel1 = [str(x) for x in plotlabels]
            # wrap each sample in a 1-tuple so hstack pairs columns up
            plots = []
            for i in range(len(readResult)):
                x = readResult[i]
                d = []
                for z in range(len(x)):
                    tu = x[z]
                    d.append((tu,))
                plots.append(d)
            n = numpy.array(plots)
            # print full array: repr() must not elide rows with '...'
            numpy.set_printoptions(threshold=numpy.inf)
            dygraph_array = repr(numpy.hstack(n)).replace('array', ' ').replace('(', ' ').replace(')', ' ')
            dygraphoptions = " ".join(['{', 'legend:"always",', 'labels:', str(plotlabel1), '}'])
            data = "".join(['<script type="text/javascript"> g = new Dygraph(document.getElementById(' + '"' + str(divid) + '"' + '),', str(dygraph_array), ',', dygraphoptions, ')', '</script>'])
            htmlhead = '''<html> <head> <script src="dygraph-combined.js"> </script> </head>'''
            divcontent = "\n".join([htmlhead, divheader, str(data),'</html>'])
        except BaseException:
            # surface the OMC error string instead of a plot
            error = omc.sendExpression("getErrorString()")
            divcontent = "".join(['<p>', error, '</p>'])
    else:
        divcontent = "".join(['<p>', 'No result File Generated', '</p>'])
    return divcontent
def create_singlejar(output_path, classpath, runpy):
    """Assemble a standalone Jython jar (Python 2/Jython code).

    Bundles the given classpath jars, the clamp jar.pth jars, the Jython
    standard library, and everything in site-packages (copied under Lib/
    because standalone Jython cannot process .pth files).

    :param output_path: destination jar path
    :param classpath: list of jar paths to include (mutated in place)
    :param runpy: optional script copied into the jar as __run__.py
    """
    jars = classpath
    jars.extend(find_jython_jars())
    site_path = site.getsitepackages()[0]
    with JarPth() as jar_pth:
        for jar_path in sorted(jar_pth.itervalues()):
            jars.append(os.path.join(site_path, jar_path))
    with JarCopy(output_path=output_path, runpy=runpy) as singlejar:
        singlejar.copy_jars(jars)
        log.debug("Copying standard library")
        for relpath, realpath in find_jython_lib_files():
            singlejar.copy_file(relpath, realpath)
        # FOR NOW: copy everything in site-packages into Lib/ in the built jar;
        # this is because Jython in standalone mode has the limitation that it can
        # only properly find packages under Lib/ and cannot process .pth files
        # THIS SHOULD BE FIXED
        sitepackage = site.getsitepackages()[0]
        # copy top level packages
        for item in os.listdir(sitepackage):
            path = os.path.join(sitepackage, item)
            if path.endswith(".egg") or path.endswith(".egg-info") or path.endswith(".pth") or path == "jars":
                continue
            log.debug("Copying package %s", path)
            for pkg_relpath, pkg_realpath in find_package_libs(path):
                log.debug("Copy package file %s %s", pkg_relpath, pkg_realpath)
                singlejar.copy_file(os.path.join("Lib", item, pkg_relpath), pkg_realpath)
        # copy eggs
        for path in read_pth(os.path.join(sitepackage, "easy-install.pth")).itervalues():
            # ZIP only uses /
            relpath = "/".join(os.path.normpath(os.path.join("Lib", path)).split(os.sep))
            path = os.path.realpath(os.path.normpath(os.path.join(sitepackage, path)))
            if copy_zip_file(path, singlejar):
                # tiny lie - already copied, but keeping consistent!
                log.debug("Copying %s (zipped file)", path)
                continue
            log.debug("Copying egg %s", path)
            for pkg_relpath, pkg_realpath in find_package_libs(path):
                # Filter out egg metadata
                parts = pkg_relpath.split(os.sep)
                head = parts[0]
                if head == "EGG-INFO" or head.endswith(".egg-info"):
                    continue
                singlejar.copy_file(os.path.join("Lib", pkg_relpath), pkg_realpath)
        if runpy and os.path.exists(runpy):
            singlejar.copy_file("__run__.py", runpy)
def echo_server(address, authkey):
    # Blocking echo server over multiprocessing.connection (Python 2).
    # Accepts connections forever; stops when echo_client returns False.
    print "loading server"
    print sys.executable
    print site.getsitepackages()
    serv = Listener(address, authkey=authkey)
    print "started listener"
    while True:
        try:
            client = serv.accept()
            print "got something"
            if echo_client(client) == False:
                break
        except Exception:
            # keep serving after a failed client; just log the traceback
            traceback.print_exc()
def installMissingPackage(packageList):
    """Best-effort install of the scientific-Python stack via apt and
    easy_install/pip (requires sudo; Linux only).

    The major Python version is parsed from the first site-packages path
    component that contains 'python' (e.g. 'python2.7' -> 2) and selects
    the matching easy_install/pip commands.

    :param packageList: unused; kept for interface compatibility.
    """
    a = site.getsitepackages()
    a = a[0]
    a = a.split('/')
    for el in a:
        if 'python' in el:
            # 'pythonX.Y' -> major version int (int(float('2.7')) == 2)
            b = el.replace('python', '')
            b = int(float(b))
    # NOTE(review): if no path component contains 'python', b is never bound
    # and the comparisons below raise NameError -- confirm intended.
    os.system('sudo apt-get install python-numpy python-scipy python-matplotlib ipython ipython-notebook python-pandas python-sympy python-nose')
    if b == 2:
        try:
            os.system('sudo easy_install numpy scipy Sphinx numpydoc nose pykalman')
            os.system('sudo pip install cma')
            os.system('sudo easy_install cython')
            os.system('sudo pip install distlib')
        except Exception:
            # best effort: ignore installer failures (was a bare except)
            pass
    elif b == 3:
        try:
            # (a stray no-op 'os' expression statement was removed here)
            os.system('sudo easy_install3 numpy scipy Sphinx numpydoc nose pykalman')
            os.system('sudo pip3 install cma')
            os.system('sudo easy_install3 cython')
            os.system('sudo pip3 install distlib')
        except Exception:
            pass
    os.system('clear')
def __init__(self, verbose=True):
    """Scan all importable paths for entry points and collect their groups
    (Python 2 code: uses `basestring`).

    :param verbose: accepted for interface compatibility; not used here.
    """
    self.plugin_types = ('wralea', 'plugin', 'adapters', 'interfaces')
    self.groups = set()
    self.managers = {}
    self._services = {}
    self._interfaces = {}
    self._lowername = {}
    # list all path supporting python modules
    paths = site.getsitepackages()
    usersite = site.getusersitepackages()
    # getusersitepackages may return one path or a sequence of paths
    if isinstance(usersite, basestring):
        paths.append(usersite)
    elif isinstance(usersite, (tuple, list)):
        paths += list(usersite)
    paths += sys.path
    # scan all entry_point and list different groups
    for path in set(paths):
        distribs = pkg_resources.find_distributions(path)
        for distrib in distribs :
            for group in distrib.get_entry_map():
                self.groups.add(group)
    self.groups = [group for group in self.groups]
    self.tags = self._clean_lst(self.groups)
    self._load_interfaces()
def create_sitecustomize(path):
    """Generate bin/sitecustomize.py embedding the current site-packages
    paths plus the module-level *_SITE_PACKAGES constants (Python 2:
    uses `print >>fp`).

    :param path: unused; output always goes to bin/sitecustomize.py.
    """
    # NOTE(review): extra_sitepacks is always empty here, so the first
    # branch below can never take the non-empty path -- confirm intended.
    extra_sitepacks = []
    sitepacks = ensure_list(site.getsitepackages())
    user_sitepacks = ensure_list(site.getusersitepackages())
    fp = open("bin/sitecustomize.py", "w")
    print >>fp, 'import os, site, sys'
    print >>fp, 'EXTRA_SITE_PACKAGES = ' + str(EXTRA_SITE_PACKAGES)
    print >>fp, 'SYSTEM_SITE_PACKAGES = ' + str(SYSTEM_SITE_PACKAGES)
    print >>fp, 'USER_SITE_PACKAGES = ' + str(USER_SITE_PACKAGES)
    if extra_sitepacks:
        print >>fp, 'extra_sitepacks = ["' + '", "'.join(extra_sitepacks) + '"]'
    else:
        print >>fp, 'extra_sitepacks = [ ]'
    if sitepacks:
        print >>fp, 'sitepacks = ["' + '", "'.join(sitepacks) + '"]'
    else:
        print >>fp, 'sitepacks = [ ]'
    if user_sitepacks:
        print >>fp, 'user_sitepacks = ["' + '", "'.join(user_sitepacks) + '"]'
    else:
        print >>fp, 'user_sitepacks = [ ]'
    # trailing boilerplate defined at module level
    print >>fp, site_script
    fp.close()
def uninstall(): """Uninstall psutil""" # Uninstalling psutil on Windows seems to be tricky. # On "import psutil" tests may import a psutil version living in # C:\PythonXY\Lib\site-packages which is not what we want, so # we try both "pip uninstall psutil" and manually remove stuff # from site-packages. clean() install_pip() here = os.getcwd() try: os.chdir('C:\\') while True: try: import psutil # NOQA except ImportError: break else: sh("%s -m pip uninstall -y psutil" % PYTHON) finally: os.chdir(here) for dir in site.getsitepackages(): for name in os.listdir(dir): if name.startswith('psutil'): rm(os.path.join(dir, name))
def resDataSave(bo_url , cmdstr ,workspace_name, passQ = False , dumptype='CSV', dump_filename='dump.csv'):
    """Dump oversized query results to a file via two helper subprocesses
    (Python 2: `print` statement / `raw_input`).

    :param bo_url: service URL passed to the REST helper script
    :param cmdstr: command string for the REST helper
    :param workspace_name: workspace; empty string becomes a quoted empty arg
    :param passQ: when False, interactively ask the user to confirm first
    :param dumptype: output format tag (e.g. 'CSV')
    :param dump_filename: output file name in the current directory
    :return: True after dumping; False if the user declines
    """
    if passQ == False:
        confirmStr= "=============\nSize of data exceeded display limit, dump to csv format? (yes/no)"
        import fileinput
        print confirmStr
        while True:
            choice=raw_input()
            if choice=="yes" or choice=="y":
                break
            elif choice=="no" or choice=="n":
                return False
            else:
                # unrecognized answer: re-prompt
                print confirmStr
    #bo_host = bo_url.split(":")[1][2:]
    #bo_port = bo_url.split(":")[2][:-4]
    import subprocess
    import os
    import signal
    from distutils.sysconfig import get_python_lib
    boshcwd =os.getcwd()
    #lib_path = get_python_lib()
    import site
    lib_path = site.getsitepackages()[0]
    # print("lib_path " +lib_path)
    if str(workspace_name) == "":
        workspace_name = '\"\"'
    # pipeline: borestful.py fetches JSON, bojson2file.py converts it
    rest = subprocess.Popen(["python", lib_path + "/dumpRes/borestful.py" , bo_url , cmdstr , str(workspace_name)], stdout=subprocess.PIPE)
    tocsv = subprocess.Popen(["python", lib_path + "/dumpRes/bojson2file.py", dumptype, boshcwd + "/" + dump_filename] , stdin=rest.stdout)
    print("dumping the data to " + dump_filename + " , type: " + dumptype + " ...")
    rest.wait()
    tocsv.wait()
    return True
def build_kernel_json(self):
    """Assemble the Jupyter kernel.json spec for the EclairJS Toree kernel.

    Locates the installed toree launcher script and the bundled jar in
    site-packages, then returns the kernel spec dict.
    """
    import toree
    launcher = os.path.join(os.path.dirname(inspect.getfile(toree)), 'bin/run.sh')
    bundled_jar = os.path.join(site.getsitepackages()[0], PACKAGE_NAME, "jars", JAR_FILE)
    argv = [
        launcher,
        "--interpreter-plugin",
        "eclair:org.eclairjs.nashorn.JavascriptInterpreter",
        "--default-interpreter",
        "eclair",
        "--nosparkcontext",
        "--profile",
        "{connection_file}",
    ]
    env = {
        "SPARK_OPTS": "--jars " + bundled_jar,
        "SPARK_HOME": os.environ['SPARK_HOME'],
    }
    return {
        "name": "eclair",
        "display_name": "Spark 1.6.0 (EclairJS Toree)",
        "language": "javascript",
        "argv": argv,
        "env": env,
    }
def __init__(self, fileName=None):
    """Load the Brill lexicon into self.lexicon.

    Search order: a site-packages install, an explicitly passed fileName,
    the distutils package directory, then a local checkout path.
    Each non-comment lexicon line maps its first column to the rest.
    """
    self.lexicon = {}
    packageFileName = str(distutils.sysconfig.get_python_lib())+"/"+PACKAGE+"/"+LOCAL+"/brill-lexicon.dat"
    localFileName = PACKAGE+"/"+LOCAL+"/brill-lexicon.dat"
    # allows looking for the package in, eg, /usr/local/lib
    for dir in site.getsitepackages():
        siteFileName = dir+"/"+PACKAGE+"/"+LOCAL+"/brill-lexicon.dat"
        if os.path.isfile(siteFileName):
            fileName = siteFileName
            break
    if fileName is not None:
        pass
    elif os.path.isfile(packageFileName):
        fileName = packageFileName
    elif os.path.isfile(localFileName):
        fileName = localFileName
    else:
        sys.stderr.write("ERROR: Could not find default Brill lexicon.")
        # NOTE(review): fileName stays None here, so open() below fails --
        # preserved from the original; confirm whether it should raise.
    # use a context manager so the handle is closed (it leaked before)
    with open(fileName, "r") as lexiconFile:
        for line in lexiconFile:
            if not line.startswith("#"):
                col = line.split()
                self.lexicon[col[0]] = col[1:]
def AddToPath(self, package):
    """Ensure *package* (an artifact with artifactId/version) is importable
    by extending sys.path with site-packages dirs, matching egg/zip
    archives, and entries from easy-install.pth files.
    """
    # Find all 'site-packages' directories.
    sp = getsitepackages()
    sp.append(getusersitepackages())
    for path in sp:
        if path not in sys.path and os.path.exists(path):
            sys.path.append(path)
    # Add package to sys.path.
    def _AddToPath(name, version):
        # look for '<name>-<version>*.egg' / '*.zip' archives in each dir
        for path in sp:
            packageFileName = name + '-' + version
            files = DirFileSet(path, [packageFileName + '*.egg', packageFileName + '*.zip'], withRootDirName = True)
            for fn in files:
                if fn not in sys.path and os.path.exists(fn):
                    sys.path.append(fn)
    # try both dashed and underscored artifact names
    _AddToPath(package.artifactId, package.version)
    _AddToPath(package.artifactId.replace('-', '_'), package.version)
    # Try to add all the packages that are missing in
    # sys.path but are listed in: easy-install.pth.
    for path in sp:
        pth = os.path.join(path, 'easy-install.pth')
        try:
            pth = OS.LoadFile(pth)
            pth = pth.split('\n')
            for fn in pth:
                # skip comments and the setuptools 'import ...' bookkeeping lines
                if not fn.startswith(('#', "import ", "import\t")):
                    fn = os.path.realpath(os.path.join(path, fn))
                    if fn not in sys.path and os.path.exists(fn):
                        sys.path.append(fn)
        except Exception as E:
            # missing/unreadable .pth is fine; ignore
            #self.Log(str(E), level = LogLevel.VERBOSE)
            pass
def build():
    # Build steam_shortcut_manager_cli.py into a one-file console exe with
    # PyInstaller (Python 2).  Raises IOError when PyInstaller is missing.
    pyinstaller = os.path.join(site.getsitepackages()[0], "Scripts", "pyinstaller-script.py")
    if not os.path.exists(pyinstaller):
        raise IOError("PyInstaller is required to build for windows")
    print "PyInstaller check passed"
    buildargs=['python', pyinstaller, 'steam_shortcut_manager_cli.py', '-y', '--console', '--onefile', '--clean', '--noupx']
    subprocess.call(' '.join(buildargs))
def __init__(self, **kwargs):
    """Qt application bootstrap: register PyQt5 plugin paths per platform,
    configure the render loop, load plugins, and read preferences.
    Order-sensitive: plugin paths must be added before the Qt app starts.
    """
    plugin_path = ""
    if sys.platform == "win32":
        if hasattr(sys, "frozen"):
            # frozen build: plugins live next to the executable
            plugin_path = os.path.join(os.path.dirname(os.path.abspath(sys.executable)), "PyQt5", "plugins")
            Logger.log("i", "Adding QT5 plugin path: %s" % (plugin_path))
            QCoreApplication.addLibraryPath(plugin_path)
        else:
            import site
            for dir in site.getsitepackages():
                QCoreApplication.addLibraryPath(os.path.join(dir, "PyQt5", "plugins"))
    elif sys.platform == "darwin":
        plugin_path = os.path.join(Application.getInstallPrefix(), "Resources", "plugins")
    if plugin_path:
        Logger.log("i", "Adding QT5 plugin path: %s" % (plugin_path))
        QCoreApplication.addLibraryPath(plugin_path)
    os.environ["QSG_RENDER_LOOP"] = "basic"
    super().__init__(sys.argv, **kwargs)
    self._plugins_loaded = False  #Used to determine when it's safe to use the plug-ins.
    self._main_qml = "main.qml"
    self._engine = None
    self._renderer = None
    self._main_window = None
    self._shutting_down = False
    self._qml_import_paths = []
    self._qml_import_paths.append(os.path.join(os.path.dirname(sys.executable), "qml"))
    self._qml_import_paths.append(os.path.join(Application.getInstallPrefix(), "Resources", "qml"))
    self.setAttribute(Qt.AA_UseDesktopOpenGL)
    # splash screen is optional; its image file may be absent
    try:
        self._splash = self._createSplashScreen()
    except FileNotFoundError:
        self._splash = None
    else:
        self._splash.show()
        self.processEvents()
    # restore default Ctrl-C behavior (Qt swallows SIGINT otherwise)
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    # This is done here as a lot of plugins require a correct gl context. If you want to change the framework,
    # these checks need to be done in your <framework>Application.py class __init__().
    i18n_catalog = i18nCatalog("uranium")
    self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Loading plugins..."))
    self._loadPlugins()
    self.parseCommandLine()
    Logger.log("i", "Command line arguments: %s", self._parsed_command_line)
    self._plugin_registry.checkRequiredPlugins(self.getRequiredPlugins())
    self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Loading preferences..."))
    # a missing preferences file just means first run; keep defaults
    try:
        file = Resources.getPath(Resources.Preferences, self.getApplicationName() + ".cfg")
        Preferences.getInstance().readFromFile(file)
    except FileNotFoundError:
        pass
def pytest_report_header(config):
    """Print environment details (cython/jython flags, CPU count, and the
    interpreter/site paths relevant to debugging) in the pytest header."""
    print('PYDEVD_USE_CYTHON: %s' % (TEST_CYTHON,))
    print('PYDEVD_TEST_JYTHON: %s' % (TEST_JYTHON,))
    try:
        import multiprocessing
    except ImportError:
        pass
    else:
        print('Number of processors: %s' % (multiprocessing.cpu_count(),))
    print('Relevant system paths:')
    print('sys.prefix: %s' % (sys.prefix,))
    # base_prefix/real_prefix only exist on venv/virtualenv interpreters
    for attr in ('base_prefix', 'real_prefix'):
        if hasattr(sys, attr):
            print('sys.%s: %s' % (attr, getattr(sys, attr)))
    # these site helpers are missing on some interpreters
    for helper in ('getusersitepackages', 'getsitepackages'):
        fn = getattr(site, helper, None)
        if fn is not None:
            print('site.%s(): %s' % (helper, fn()))
    for entry in sys.path:
        if os.path.basename(entry) == 'site-packages' and os.path.exists(entry):
            print('Folder with "site-packages" in sys.path: %s' % (entry,))
def startRyu(self, learning_switch=True):
    """Launch a Ryu SDN controller (with REST API) as a subprocess,
    storing it in self.ryu_process; output goes to /tmp/ryu.log.

    :param learning_switch: include the custom learning-switch app too
    """
    # start Ryu controller with rest-API
    python_install_path = site.getsitepackages()[0]
    # ryu default learning switch
    # ryu_path = python_install_path + '/ryu/app/simple_switch_13.py'
    # custom learning switch that installs a default NORMAL action in the
    # ovs switches
    dir_path = os.path.dirname(os.path.realpath(__file__))
    ryu_path = dir_path + '/son_emu_simple_switch_13.py'
    ryu_path2 = python_install_path + '/ryu/app/ofctl_rest.py'
    # change the default Openflow controller port to 6653 (official IANA-assigned port number), as used by Mininet
    # Ryu still uses 6633 as default
    ryu_option = '--ofp-tcp-listen-port'
    ryu_of_port = '6653'
    ryu_cmd = 'ryu-manager'
    FNULL = open("/tmp/ryu.log", 'w')
    if learning_switch:
        self.ryu_process = Popen(
            [ryu_cmd, ryu_path, ryu_path2, ryu_option, ryu_of_port],
            stdout=FNULL,
            stderr=FNULL)
        LOG.debug('starting ryu-controller with {0}'.format(ryu_path))
        LOG.debug('starting ryu-controller with {0}'.format(ryu_path2))
    else:
        # no learning switch, but with rest api
        self.ryu_process = Popen(
            [ryu_cmd, ryu_path2, ryu_option, ryu_of_port],
            stdout=FNULL,
            stderr=FNULL)
        LOG.debug('starting ryu-controller with {0}'.format(ryu_path2))
    # give the controller a moment to come up before it is used
    time.sleep(1)
def _get_default_library_roots(cls): # Provide sensible defaults if not in env vars. import site roots = [sys.prefix] if hasattr(sys, 'base_prefix'): roots.append(sys.base_prefix) if hasattr(sys, 'real_prefix'): roots.append(sys.real_prefix) if hasattr(site, 'getusersitepackages'): site_paths = site.getusersitepackages() if isinstance(site_paths, (list, tuple)): for site_path in site_paths: roots.append(site_path) else: roots.append(site_paths) if hasattr(site, 'getsitepackages'): site_paths = site.getsitepackages() if isinstance(site_paths, (list, tuple)): for site_path in site_paths: roots.append(site_path) else: roots.append(site_paths) for path in sys.path: if os.path.exists(path) and os.path.basename(path) == 'site-packages': roots.append(path) return sorted(set(roots))
def findSitePackagesPath(requestedPath):
    """Return the first path under a site-packages directory that contains
    *requestedPath*, or None when it exists in none of them."""
    candidates = (
        os.path.join(base, requestedPath)
        for base in site.getsitepackages()
    )
    for candidate in candidates:
        if os.path.exists(candidate):
            return candidate
    return None
def findSitePackagesPath(requestedPath):
    """Return the first path under a site-packages directory that contains
    *requestedPath*.

    Raises Exception when the path exists in no site-packages directory.
    """
    for base in site.getsitepackages():
        candidate = os.path.join(base, requestedPath)
        if os.path.exists(candidate):
            return candidate
    raise Exception("cannot find file {0}".format(requestedPath))
def list_site_packages_paths():
    """Collect every known site-packages-style path: the user site,
    system site-packages, and virtualenv entries from sys.path."""
    paths = {site.USER_SITE}
    # system site-packages (helper is absent on some interpreters)
    try:
        paths.update(site.getsitepackages())
    except AttributeError:
        pass
    # user site-packages: may be a single path or a sequence
    try:
        user_site = site.getusersitepackages()
    except AttributeError:
        pass
    else:
        if isinstance(user_site, str):
            paths.add(user_site)
        else:
            paths.update(user_site)
    # inside a virtualenv, also pick up its site-packages and src dirs
    venv = os.environ.get('VIRTUAL_ENV')
    if venv is not None:
        venv_src = os.path.join(venv, 'src')
        for entry in sys.path:
            if not entry.startswith(venv):
                continue
            if 'site-packages' in entry or entry.startswith(venv_src):
                paths.add(entry)
    return paths
def fetch_Landsat8_scene_list():
    """Simple downloads and extracts the most recent version of the
    scene_list text file for reference

    http://landsat-pds.s3.amazonaws.com/scene_list.gz

    :return scene_list_text_data: returns a text data object with all the
        data on scene inventory on amazon WS.
    """
    print("Updating scene list")
    # define save path for new scene list
    # NOTE(review): assumes site-packages index [1] holds the dnppy install
    # -- confirm on the target platform.
    directory = site.getsitepackages()[1]
    gz_path = "{0}/dnppy/landsat/metadata/scene_list.gz".format(directory)
    txt_path = "{0}/dnppy/landsat/metadata/scene_list.txt".format(directory)
    # download then extract the gz file to a txt file.
    download_url("http://landsat-pds.s3.amazonaws.com/scene_list.gz", gz_path)
    with gzip.open(gz_path,'rb') as gz:
        content = gz.read()
    # NOTE(review): writelines() over a bytes object writes it element by
    # element -- presumably Python 2; confirm before running on Python 3.
    with open(txt_path, 'wb+') as f:
        f.writelines(content)
    # build a new text data object from the fresh scene list
    scene_list_text_data = textio.text_data()
    scene_list_text_data.read_csv(txt_path, delim = ",", has_headers = True)
    return scene_list_text_data
def __init__(self):
    """Initializer, makes ~/cltk_data dirs and configures logging."""
    self.cltk_bin_path = os.path.join(site.getsitepackages()[0], 'cltk')
    # make local CLTK dirs
    cltk_data = os.path.expanduser('~/cltk_data')
    self._ensure_dir(cltk_data)
    self.orig_files_dir = os.path.join(cltk_data, 'originals')
    self._ensure_dir(self.orig_files_dir)
    self.compiled_files_dir = os.path.join(cltk_data, 'compiled')
    self._ensure_dir(self.compiled_files_dir)
    log_path = os.path.join(cltk_data, 'cltk.log')
    logging.basicConfig(filename=log_path,
                        level=logging.INFO,
                        format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p')

@staticmethod
def _ensure_dir(path):
    """Create *path* if missing (single level, like os.mkdir).

    Replaces three copies of the `if os.path.isdir(x) is True: pass
    else: os.mkdir(x)` anti-pattern from the original.
    """
    if not os.path.isdir(path):
        os.mkdir(path)
def run(self): if not self.distribution.clamp: raise DistutilsOptionError("Specify the modules to be built into a jar with the 'clamp' setup keyword") jar_dir = os.path.join(site.getsitepackages()[0], "jars") if not os.path.exists(jar_dir): os.mkdir(jar_dir) if self.output_jar_pth: jar_pth_path = os.path.join(site.getsitepackages()[0], "jar.pth") paths = read_pth(jar_pth_path) print "paths in jar.pth", paths paths[self.distribution.metadata.get_name()] = os.path.join("./jars", self.get_jar_name()) write_jar_pth(jar_pth_path, paths) with closing(JarBuilder(output_path=self.output)) as builder: clamp.register_builder(builder) for module in self.distribution.clamp: __import__(module)
# NOTE(review): this chunk is cut at both ends -- the lines below belong to
# a constructor of an enclosing class that is not visible here.
        self.top_left_x = tlx
        self.top_left_y = tly
        self.bot_right_x = brx
        self.bot_right_y = bry

# Read/write function pointer types.
_I2C_MULTI_FUNC = CFUNCTYPE(c_int, c_ubyte, c_ubyte)
_I2C_READ_FUNC = CFUNCTYPE(c_int, c_ubyte, c_ubyte, POINTER(c_ubyte), c_ubyte)
_I2C_WRITE_FUNC = CFUNCTYPE(c_int, c_ubyte, c_ubyte, POINTER(c_ubyte), c_ubyte)

# Load VL53L1X shared lib
# Candidate directories: next to this file, then system and user
# site-packages (either helper may be missing on some interpreters).
_POSSIBLE_LIBRARY_LOCATIONS = [os.path.dirname(os.path.realpath(__file__))]
try:
    _POSSIBLE_LIBRARY_LOCATIONS += site.getsitepackages()
except AttributeError:
    pass
try:
    _POSSIBLE_LIBRARY_LOCATIONS += [site.getusersitepackages()]
except AttributeError:
    pass
# pick the first matching compiled extension found in any candidate dir
for lib_location in _POSSIBLE_LIBRARY_LOCATIONS:
    files = glob.glob(lib_location + "/vl53l1x_python*.so")
    if len(files) > 0:
        lib_file = files[0]
        # NOTE(review): the try below continues past this chunk's end.
        try:
            _TOF_LIBRARY = CDLL(lib_file)
            _getRangingMeasurementData = _TOF_LIBRARY.getRangingMeasurementData
def main(argv):
    """CLI entry point for the GraalVM package installer.

    Subcommands: list (installed packages), install (known packages),
    uninstall (delete a package folder), pypi (best-effort PyPI install).

    :param argv: argument vector (without the program name)
    """
    parser = argparse.ArgumentParser(
        description="The simple Python package installer for GraalVM")
    subparsers = parser.add_subparsers(
        title="Commands",
        dest="command",
        metavar="Use COMMAND --help for further help.")
    subparsers.add_parser("list", help="list locally installed packages")
    install_parser = subparsers.add_parser(
        "install",
        help="install a known package",
        description="Install a known package. Known packages are " + ", ".join(KNOWN_PACKAGES.keys()))
    install_parser.add_argument("package", help="comma-separated list")
    install_parser.add_argument("--prefix", help="user-site path prefix")
    subparsers.add_parser(
        "uninstall",
        help="remove installation folder of a local package",
    ).add_argument("package", help="comma-separated list")
    subparsers.add_parser(
        "pypi",
        help=
        "attempt to install a package from PyPI (untested, likely won't work, and it won't install dependencies for you)",
        description="Attempt to install a package from PyPI"
    ).add_argument(
        "package",
        help=
        "comma-separated list, can use `==` at the end of a package name to specify an exact version"
    )
    args = parser.parse_args(argv)
    if args.command == "list":
        # resolve the site dir packages were installed into
        if site.ENABLE_USER_SITE:
            user_site = site.getusersitepackages()
        else:
            for s in site.getsitepackages():
                if s.endswith("site-packages"):
                    user_site = s
                    break
        info("Installed packages:")
        for p in sys.path:
            if p.startswith(user_site):
                info(p[len(user_site) + 1:])
    elif args.command == "uninstall":
        warn(
            "WARNING: I will only delete the package folder, proper uninstallation is not supported at this time."
        )
        user_site = site.getusersitepackages()
        for pkg in args.package.split(","):
            deleted = False
            for p in sys.path:
                if p.startswith(user_site):
                    # +1 due to the path separator
                    pkg_name = p[len(user_site) + 1:]
                    if pkg_name.startswith(pkg):
                        if os.path.isdir(p):
                            shutil.rmtree(p)
                        else:
                            os.unlink(p)
                        deleted = True
                        break
            if deleted:
                info("Deleted {}", p)
            else:
                xit("Unknown package: '%s'" % pkg)
    elif args.command == "install":
        for pkg in args.package.split(","):
            if pkg not in KNOWN_PACKAGES:
                xit("Unknown package: '%s'" % pkg)
            else:
                if args.prefix:
                    KNOWN_PACKAGES[pkg](extra_opts=["--prefix", args.prefix])
                else:
                    KNOWN_PACKAGES[pkg]()
    elif args.command == "pypi":
        for pkg in args.package.split(","):
            install_from_pypi(pkg, ignore_errors=False)
def _site_packages(): return [Path(s) for s in site.getsitepackages()]
# NOTE(review): fragment of a cx_Freeze/distutils setup script; the loop or
# conditional this `else:` belongs to is outside the visible chunk.
else:
    # compile the .po catalog and register the resulting .mo as a data file
    os.popen("msgfmt lang/%s/%s.po -o lang/%s/%s.mo" % (dir, pofile, dir, pofile))
    DATA_FILES += [("share/locale/" + dir + "/LC_MESSAGES", ["lang/" + dir + "/" + pofile + ".mo"])]
    pychess_langs.append(dir)

PACKAGES = []

if msi:
    # TODO: cx_freeze doesn't allow letters in version
    #VERSION = "0.12.0"
    ## Get the site-package folder, not everybody will install
    ## Python into C:\PythonXX
    site_dir = site.getsitepackages()[1]
    include_dll_path = os.path.join(site_dir, "gnome")
    lang_path = os.path.join(site_dir, "gnome", "share", "locale")
    ## gtk3.0 .mo files
    gtk_mo = [ f + "/LC_MESSAGES/gtk30.mo" for f in os.listdir(lang_path) if f in pychess_langs ]
    ## Collect the list of missing dll when cx_freeze builds the app
    missing_dll = [f for f in os.listdir(include_dll_path) if \
        (f.endswith(".dll") or (f.startswith("gspawn") and f.endswith(".exe")))]
    ## We need to add all the libraries too (for themes, etc..)
    # NOTE(review): the list literal below continues past this chunk's end.
    gtk_libs = [
class TextUserInterface(ui.UserInterface):
    """This is the main class for Text user interface.

    .. inheritance-diagram:: TextUserInterface
       :parts: 3
    """

    # Environment name passed to each spoke's setup() so spokes can decide
    # whether they apply to this UI.
    ENVIRONMENT = "anaconda"

    def __init__(self, storage, payload, instclass,
                 productTitle=u"Anaconda", isFinal=True, quitMessage=None):
        """
        For detailed description of the arguments see the parent class.

        :param storage: storage backend reference
        :type storage: instance of pyanaconda.Storage

        :param payload: payload (usually dnf) reference
        :type payload: instance of payload handler

        :param instclass: install class reference
        :type instclass: instance of install class

        :param productTitle: the name of the product
        :type productTitle: str

        :param isFinal: Boolean that marks the release as final (True) or
                        development (False) version.
        :type isFinal: bool

        :param quitMessage: The text to be used in quit dialog question. It
                            should not be translated to allow for change of
                            language.
        :type quitMessage: str
        """
        ui.UserInterface.__init__(self, storage, payload, instclass)
        self._meh_interface = meh.ui.text.TextIntf()

        self.productTitle = productTitle
        self.isFinal = isFinal
        self.quitMessage = quitMessage

    # Class-level computation of the plugin search paths: the hardwired
    # update path, the package's own directory and every installed copy
    # under site-packages.
    basemask = "pyanaconda.ui"
    basepath = os.path.dirname(os.path.dirname(__file__))
    updatepath = "/tmp/updates/pyanaconda/ui"
    # NOTE(review): `dir` here shadows the builtin; kept as in the original.
    sitepackages = [os.path.join(dir, "pyanaconda", "ui")
                    for dir in site.getsitepackages()]
    pathlist = set([updatepath, basepath] + sitepackages)

    _categories = []
    _spokes = []
    _hubs = []

    # as list comprehension can't reference class level variables in Python 3 we
    # need to use a for cycle (http://bugs.python.org/issue21161)
    for path in pathlist:
        _categories.append((basemask + ".categories.%s",
                            os.path.join(path, "categories")))
        _spokes.append((basemask + ".tui.spokes.%s",
                        os.path.join(path, "tui/spokes")))
        _hubs.append((basemask + ".tui.hubs.%s",
                      os.path.join(path, "tui/hubs")))

    # NOTE(review): `+` on the parent's `paths` implies it is a custom
    # mapping type supporting concatenation, not a plain dict — confirm.
    paths = ui.UserInterface.paths + {
        "categories": _categories,
        "spokes": _spokes,
        "hubs": _hubs,
    }

    @property
    def tty_num(self):
        # The TUI always runs on tty1.
        return 1

    @property
    def meh_interface(self):
        # python-meh (exception handling) interface created in __init__.
        return self._meh_interface

    def _list_hubs(self):
        """Returns the list of hubs to use."""
        return [SummaryHub]

    def _is_standalone(self, spoke):
        """Checks if the passed spoke is standalone."""
        return isinstance(spoke, StandaloneSpoke)

    def setup(self, data):
        """Construct all the objects required to implement this interface.

        This method must be provided by all subclasses.
        """
        # Use GLib event loop for the Simpleline TUI
        loop = GLibEventLoop()
        App.initialize(event_loop=loop)

        loop.set_quit_callback(tui_quit_callback)
        scheduler = App.get_scheduler()
        scheduler.quit_screen = YesNoDialog(self.quitMessage)

        # tell python-meh it should use our raw_input
        meh_io_handler = meh.ui.text.IOHandler(in_func=scheduler.io_manager.get_user_input_without_check)
        self._meh_interface.set_io_handler(meh_io_handler)

        # register handlers for various messages
        # (re-binds `loop` to the event loop App now owns)
        loop = App.get_event_loop()
        loop.register_signal_handler(ExceptionSignal, exception_msg_handler)
        loop.register_signal_handler(SendMessageSignal, self._handle_show_message)

        _hubs = self._list_hubs()

        # First, grab a list of all the standalone spokes.
        spokes = self._collectActionClasses(self.paths["spokes"], StandaloneSpoke)
        actionClasses = self._orderActionClasses(spokes, _hubs)

        for klass in actionClasses:
            obj = klass(data, self.storage, self.payload, self.instclass)

            # If we are doing a kickstart install, some standalone spokes
            # could already be filled out.  In that case, we do not want
            # to display them.
            if self._is_standalone(obj) and obj.completed:
                del(obj)
                continue

            if hasattr(obj, "set_path"):
                obj.set_path("spokes", self.paths["spokes"])
                obj.set_path("categories", self.paths["categories"])

            should_schedule = obj.setup(self.ENVIRONMENT)

            if should_schedule:
                scheduler.schedule_screen(obj)

    def run(self):
        """Run the interface.

        This should do little more than just pass through to something else's
        run method, but is provided here in case more is needed.

        This method must be provided by all subclasses.
        """
        return App.run()

    ###
    ### MESSAGE HANDLING METHODS
    ###
    def _send_show_message(self, msg_fn, args, ret_queue):
        """
        Send message requesting to show some message dialog specified by the
        message function.

        :param msg_fn: message dialog function requested to be called
        :type msg_fn: a function taking the same number of arguments as is the
                      length of the args param
        :param args: arguments to be passed to the message dialog function
        :type args: any
        :param ret_queue: the queue which the return value of the message
                          dialog function should be put
        :type ret_queue: a queue.Queue instance
        """
        signal = SendMessageSignal(self, msg_fn=msg_fn, args=args, ret_queue=ret_queue)
        loop = App.get_event_loop()
        loop.enqueue_signal(signal)

    def _handle_show_message(self, signal, data):
        """Handler for the SendMessageSignal signal.

        Runs the requested dialog function and hands its result back to the
        waiting thread through the signal's queue.

        :param signal: SendMessage signal
        :type signal: instance of the SendMessageSignal class
        :param data: additional data
        :type data: any
        """
        msg_fn = signal.msg_fn
        args = signal.args
        ret_queue = signal.ret_queue
        ret_queue.put(msg_fn(*args))

    def _show_message_in_main_thread(self, msg_fn, args):
        """
        If running in the main thread, run the message dialog function and
        return its return value. If running in a non-main thread, request the
        message function to be called in the main thread.

        :param msg_fn: message dialog function to be run
        :type msg_fn: a function taking the same number of arguments as is the
                      length of the args param
        :param args: arguments to be passed to the message dialog function
        :type args: any
        """
        if threadMgr.in_main_thread():
            # call the function directly
            return msg_fn(*args)
        else:
            # create a queue for the result returned by the function
            ret_queue = queue.Queue()

            # request the function to be called in the main thread
            self._send_show_message(msg_fn, args, ret_queue)

            # wait and return the result from the queue
            return ret_queue.get()

    def showError(self, message):
        """Display an error dialog with the given message. After this dialog
        is displayed, anaconda will quit.

        There is no return value.  This method must be implemented by all
        UserInterface subclasses.

        In the code, this method should be used sparingly and only for
        critical errors that anaconda cannot figure out how to recover from.
        """
        return self._show_message_in_main_thread(self._showError, (message,))

    def _showError(self, message):
        """Internal helper function that MUST BE CALLED FROM THE MAIN THREAD."""
        if flags.automatedInstall and not flags.ksprompt:
            log.error(message)
            # If we're in cmdline mode, just exit.
            return

        error_window = IpmiErrorDialog(message)
        ScreenHandler.push_screen_modal(error_window)

    def showDetailedError(self, message, details, buttons=None):
        # `buttons` is accepted for interface compatibility but not used here.
        return self._show_message_in_main_thread(self._showDetailedError,
                                                 (message, details))

    def _showDetailedError(self, message, details):
        """Internal helper function that MUST BE CALLED FROM THE MAIN THREAD."""
        return self.showError(message + "\n\n" + details)

    def showYesNoQuestion(self, message):
        """Display a dialog with the given message that presents the user a yes
        or no choice.

        This method returns True if the yes choice is selected, and False if
        the no choice is selected. From here, anaconda can figure out what to
        do next. This method must be implemented by all UserInterface
        subclasses.

        In the code, this method should be used sparingly and only for those
        times where anaconda cannot make a reasonable decision. We don't want
        to overwhelm the user with choices.

        When cmdline mode is active, the default will be to answer no.
        """
        return self._show_message_in_main_thread(self._showYesNoQuestion, (message,))

    def _showYesNoQuestion(self, message):
        """Internal helper function that MUST BE CALLED FROM THE MAIN THREAD."""
        if flags.automatedInstall and not flags.ksprompt:
            log.error(message)
            # If we're in cmdline mode, just say no.
            return False

        question_window = YesNoDialog(message)
        ScreenHandler.push_screen_modal(question_window)
        question_window.redraw()
        return question_window.answer
import site
import json

# NOTE(review): `os` is used throughout this chunk but imported elsewhere in
# the file — confirm the import exists above.
kontrol = True
if kontrol:
    # package / instruction-set updates  (was: talimatname-paket güncellemeleri)
    os.system("clear")
    os.system("mps -GG")
    os.system("mps -G")
    # repair lsb-release  (was: lsb-release tamiri)
    os.system("mps -g lsb-release")
    # check and install the requirements  (was: gereklerin kontrolü-kurulumu)
    paketd = site.getsitepackages()
    yukluler = os.listdir(paketd[0])        # packages present in site-packages
    mps_gerekler = "/tmp/komutan.gerekler"  # scratch file for requirement list
    kur = "pip3 install "                   # pip install command prefix
    mpskur = "mps kur "                     # mps install command prefix
    # NOTE(review): `kontrol` is re-bound from bool to list here — presumably
    # reused further below this chunk; verify the shadowing is intentional.
    kontrol = [""]
    mpskontrol = [
        "python3-pip", "python-yaml", "python3-yaml", "python3-pythondialog"
    ]
    for mpsk in mpskontrol:
        # /var/lib/pkg/DB/<name> exists once the mps package is installed
        if os.path.exists("/var/lib/pkg/DB/" + mpsk) is False:
            os.system("mps kur " + mpsk)
        else:
            print(mpsk, "zaten kurulu")
"""A test runner for pywin32""" import sys import os import site import subprocess # locate the dirs based on where this script is - it may be either in the # source tree, or in an installed Python 'Scripts' tree. this_dir = os.path.dirname(__file__) site_packages = [ site.getusersitepackages(), ] + site.getsitepackages() failures = [] # Run a test using subprocess and wait for the result. # If we get an returncode != 0, we know that there was an error, but we don't # abort immediately - we run as many tests as we can. def run_test(script, cmdline_extras): dirname, scriptname = os.path.split(script) # some tests prefer to be run from their directory. cmd = [sys.executable, "-u", scriptname] + cmdline_extras result = subprocess.run(cmd, check=False, cwd=dirname) print("*** Test script '%s' exited with %s" % (script, result.returncode)) sys.stdout.flush() if result.returncode: failures.append(script) def find_and_run(possible_locations, extras):
from setuptools.command.build_py import build_py
from setuptools.command.install import install
from setuptools.command.develop import develop
import site
import subprocess
import sys

import numpy as np

# Project layout: C/C++ extension sources live under ptsa/extensions.
# NOTE(review): `osp` (os.path alias) and `os` are bound elsewhere in this file.
root_dir = osp.dirname(osp.abspath(__file__))
build_subdir = 'build'
morlet_dir = osp.join(root_dir, 'ptsa', 'extensions', 'morlet')
extensions_dir = osp.join(root_dir, 'ptsa', 'extensions')
circ_stat_dir = osp.join(root_dir, 'ptsa', 'extensions', 'circular_stat')

# Pick the first site dir that really is a "site-packages" directory;
# the for/else raises when none of site.getsitepackages() qualifies.
for path in site.getsitepackages():
    if path.endswith("site-packages"):
        site_packages = path
        break
else:
    raise RuntimeError("site-packages not found?!?")

# see recipe http://stackoverflow.com/questions/12491328/python-distutils-not-include-the-swig-generated-module
# for windows install see http://stackoverflow.com/questions/2817869/error-unable-to-find-vcvarsall-bat
# for visual studio compilation you need to SET VS90COMNTOOLS=%VS140COMNTOOLS%
if sys.platform.startswith("win"):
    # NOTE(review): raises KeyError when VS140COMNTOOLS is unset — presumably
    # the intended "VS 2015 required" failure mode; confirm.
    os.environ["VS90COMNTOOLS"] = os.environ["VS140COMNTOOLS"]


@contextmanager
# Load all plugin libraries from site-packages/tensorflow-plugins if we are # running under pip. # TODO(gunan): Enable setting an environment variable to define arbitrary plugin # directories. # TODO(gunan): Find a better location for this code snippet. from tensorflow.python.framework import load_library as _ll from tensorflow.python.lib.io import file_io as _fi # Get sitepackages directories for the python installation. _site_packages_dirs = [] _site_packages_dirs += [] if _site.USER_SITE is None else [_site.USER_SITE] _site_packages_dirs += [_p for _p in _sys.path if 'site-packages' in _p] if 'getsitepackages' in dir(_site): _site_packages_dirs += _site.getsitepackages() if 'sysconfig' in dir(_distutils): _site_packages_dirs += [_distutils.sysconfig.get_python_lib()] _site_packages_dirs = list(set(_site_packages_dirs)) # Find the location of this exact file. _current_file_location = _inspect.getfile(_inspect.currentframe()) def _running_from_pip_package(): return any( _current_file_location.startswith(dir_) for dir_ in _site_packages_dirs) if _running_from_pip_package(): # TODO(gunan): Add sanity checks to loaded modules here.
import pkgutil
import importlib
import sys
import copy
import imp
import inspect
import site
import os
import pprint

# Real (symlink-resolved) site-packages paths.
# NOTE(review): this module relies on Python 2 semantics — `map` must return
# a list (it is iterated repeatedly below) and `sys.modules.iteritems()` only
# exists on Python 2.
site_package_paths = map(os.path.realpath, site.getsitepackages())


def main():
    """Import the test module named by argv[1] (default ``tests/test.py``)
    and pretty-print the ``__init__.py`` files of every site-packages package
    that the import pulled in as a side effect."""
    # Snapshot of modules loaded before the import.
    before_module = set([str(i) for i in sys.modules])
    relative_path = sys.argv[1] if len(sys.argv) > 1 else 'tests/test.py'
    # "tests/test.py" -> "tests.test" (strip ".py", slashes to dots)
    name = relative_path[:-3].replace("/", ".")
    mod = importlib.import_module(name)
    # Modules newly present in sys.modules after the import.
    extra = set()
    for i, j in sys.modules.iteritems():
        if i not in before_module and j is not None:
            extra.add(j)
    # Keep only modules whose files live under site-packages.
    to_export = []
    for i in extra:
        if hasattr(i, '__file__') and any(
                map(lambda r: i.__file__.startswith(r), site_package_paths)):
            to_export.append(i.__file__)
    pprint.pprint(sorted(filter(lambda r: '__init__.py' in r, to_export)))
# # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # # Written by Pay Giesselmann # --------------------------------------------------------------------------------- import os, sys, site if __name__ == '__main__': with open(os.path.join(site.getsitepackages()[0], 'nanopype.pth'), 'w') as fp: print( 'import os; os.environ["PATH"] = "{dir}" + os.pathsep + os.environ["PATH"]' .format(dir=os.path.abspath(sys.argv[1])), file=fp)
## <http://www.gnu.org/licenses/>. from __future__ import unicode_literals import os import re import site import sys # avoid "dll Hell" on Windows by getting PyEnchant to use PyGObject's # copy of libenchant and associated libraries if sys.platform == 'win32': # disable PyEnchant's forced use of its bundled DLLs sys.platform = 'win32x' # add gnome DLLs to PATH for name in site.getsitepackages(): gnome_path = os.path.join(name, 'gnome') if os.path.isdir(gnome_path): os.environ['PATH'] = gnome_path + ';' + os.environ['PATH'] break try: import enchant except ImportError: enchant = None if sys.platform == 'win32x': # reset sys.platform sys.platform = 'win32' # using PyGObject's copy of libenchant means it won't find the # dictionaries installed with PyEnchant
# Locate the fwdpy11 package for the docs build: prefer an in-tree build one
# directory up; on ReadTheDocs fall back to the installed site-packages copy.
if (os.environ.get('READTHEDOCS') == "True") is False:
    try:
        sys.path.insert(0, os.path.abspath('..'))
        import fwdpy11
    except:
        # in-tree import failed; undo the path change and try the
        # installed package, pinning the expected version
        sys.path.pop(0)
        try:
            import fwdpy11
            if fwdpy11.__version__ != "0.5.3":
                raise RuntimeError(
                    "installed fwdpp detected, but unexpected version found")
        except:
            raise
else:
    import site
    p = site.getsitepackages()[0]
    sys.path.insert(0, p)

nitpicky = True
# References Sphinx cannot resolve and should not warn about.
nitpick_ignore = [('py:class', 'pybind11_builtins.pybind11_object'),
                  ('py:class', 'fwdpy11._regions.Region'),
                  ('py:class', 'fwdpy11._regions.Sregion'),
                  ('py:class', 'object'), ('py:obj', 'tuple')]

# subprocess.Popen(['doxygen', 'fwdpp.doxygen'],env=my_env)

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
import sys
import sysconfig
import site

if __name__ == '__main__':
    # Resolve the site-packages directory under a given installation prefix.
    #
    # This is a hack due to the distutils in debian/ubuntu's python3 being misconfigured
    # see discussion https://github.com/opencog/atomspace/issues/1782
    #
    # If the bug is fixed, this script could be replaced by:
    #
    # from distutils.sysconfig import get_python_lib; print(get_python_lib(plat_specific=True, prefix=prefix))
    #
    # However, using this would not respect a python virtual environments, so in a way this is better!
    prefix = sys.argv[1]

    # use sites if the prefix is recognized and the sites module is available
    # (virtualenv is missing getsitepackages())
    if hasattr(site, 'getsitepackages'):
        paths = [p for p in site.getsitepackages() if p.startswith(prefix)]
        # only trust an unambiguous match
        if len(paths) == 1:
            print(paths[0])
            # sys.exit instead of the site-installed exit() builtin, which is
            # absent when Python runs with -S
            sys.exit(0)

    # use sysconfig platlib as the fall back
    print(sysconfig.get_paths()['platlib'])
def test_init( self, configuration_validate, os_environ, json_load, sha1, builtins_open, access, _expanduser, exists, isdir, _abspath, ) -> None: sha1_mock = MagicMock() sha1_mock.hexdigest = lambda: "HASH" sha1.return_value = sha1_mock exists.return_value = True json_load.side_effect = [ { "source_directories": ["a"], "logger": "/usr/logger", "ignore_all_errors": ["buck-out/dev/gen"], }, {}, ] configuration = Configuration() self.assertEqual(configuration.source_directories, ["a"]) self.assertEqual(configuration.targets, []) self.assertEqual(configuration.logger, "/usr/logger") self.assertEqual(configuration.ignore_all_errors, ["buck-out/dev/gen"]) self.assertEqual(configuration.file_hash, None) json_load.side_effect = [ { "source_directories": ["a"] }, { "source_directories": ["a"] }, {}, ] configuration = Configuration("local/path") self.assertEqual(configuration.source_directories, ["local/path/a"]) json_load.side_effect = [{"targets": ["//a/b/c"], "disabled": 1}, {}] configuration = Configuration() self.assertEqual(configuration.targets, ["//a/b/c"]) self.assertEqual(configuration.source_directories, []) self.assertEqual(configuration.version_hash, "unversioned") self.assertEqual(configuration.logger, None) self.assertEqual(configuration.ignore_all_errors, []) self.assertEqual(configuration.file_hash, None) self.assertTrue(configuration.disabled) json_load.side_effect = [{"typeshed": "TYPESHED/"}, {}] configuration = Configuration() self.assertEqual(configuration.typeshed, "TYPESHED/") self.assertEqual(configuration.number_of_workers, number_of_workers()) self.assertEqual(configuration.file_hash, None) python_paths = site.getsitepackages() json_load.side_effect = [{"search_path": [{"site-package": "abc"}]}] configuration = Configuration() for python_path in python_paths: self.assertIn("{}$abc".format(python_path), configuration.search_path) json_load.side_effect = [ { "search_path": ["additional/"], "version": "VERSION", "typeshed": "TYPE/%V/SHED/", "workers": 
20, }, {}, ] configuration = Configuration() self.assertEqual(configuration.typeshed, "TYPE/VERSION/SHED/") self.assertEqual(configuration.search_path, [SearchPathElement("additional/")]) self.assertEqual(configuration.number_of_workers, 20) self.assertEqual(configuration.taint_models_path, []) self.assertEqual(configuration.file_hash, None) self.assertEqual(configuration.strict, False) json_load.side_effect = [ { "search_path": ["additional/"], "version": "VERSION", "typeshed": "TYPE/%V/SHED/", "workers": 20, "strict": True, }, {}, ] configuration = Configuration() self.assertEqual(configuration.typeshed, "TYPE/VERSION/SHED/") self.assertEqual(configuration.search_path, [SearchPathElement("additional/")]) self.assertEqual(configuration.number_of_workers, 20) self.assertEqual(configuration.taint_models_path, []) self.assertEqual(configuration.file_hash, None) self.assertEqual(configuration.strict, True) json_load.side_effect = [ { "search_path": [ "additional/", { "root": "root/", "subdirectory": "subdirectory" }, ], "version": "VERSION", "typeshed": "TYPE/%V/SHED/", "workers": 20, }, {}, ] configuration = Configuration() self.assertEqual(configuration.typeshed, "TYPE/VERSION/SHED/") self.assertEqual(configuration.search_path, ["additional/", "root/$subdirectory"]) self.assertEqual(configuration.number_of_workers, 20) self.assertEqual(configuration.file_hash, None) self.assertEqual(configuration.taint_models_path, []) json_load.side_effect = [ { "search_path": [{ "woot": "root/", "subdirectory": "subdirectory" }], "version": "VERSION", "typeshed": "TYPE/%V/SHED/", "workers": 20, }, {}, ] with self.assertRaises(InvalidConfiguration): Configuration() json_load.side_effect = [ { "search_path": "simple_string/", "version": "VERSION", "typeshed": "TYPE/%V/SHED/", "taint_models_path": ".pyre/taint_models", }, {}, ] configuration = Configuration() self.assertEqual(configuration.typeshed, "TYPE/VERSION/SHED/") self.assertEqual(configuration.search_path, ["simple_string/"]) 
self.assertEqual(configuration.taint_models_path, [".pyre/taint_models"]) json_load.side_effect = [ { "search_path": "simple_string/", "version": "VERSION", "typeshed": "TYPE/%V/SHED/", "taint_models_path": [".pyre/taint_models_1", ".pyre/taint_models_2"], }, {}, ] configuration = Configuration() self.assertEqual(configuration.typeshed, "TYPE/VERSION/SHED/") self.assertEqual(configuration.search_path, ["simple_string/"]) self.assertEqual( configuration.taint_models_path, [".pyre/taint_models_1", ".pyre/taint_models_2"], ) def directory_side_effect(path: str) -> str: if path.endswith(".pyre_configuration"): return "/root" elif path.endswith(".pyre_configuration.local"): return "/root/local" else: return path with patch("os.path.dirname", side_effect=directory_side_effect): json_load.side_effect = [ { "binary": "some/dir/pyre.bin", "typeshed": "some/typeshed" }, {}, ] configuration = Configuration() self.assertEqual(configuration.binary, "/root/some/dir/pyre.bin") self.assertEqual(configuration.typeshed, "/root/some/typeshed") json_load.side_effect = [ { "binary": "~/some/dir/pyre.bin", "typeshed": "~/some/typeshed" }, {}, ] configuration = Configuration() self.assertEqual(configuration.binary, "/home/user/some/dir/pyre.bin") self.assertEqual(configuration.typeshed, "/home/user/some/typeshed") json_load.side_effect = [ { "binary": "some/%V/pyre.bin", "typeshed": "some/%V/typeshed", "version": "VERSION", }, {}, ] configuration = Configuration() self.assertEqual(configuration.binary, "/root/some/VERSION/pyre.bin") self.assertEqual(configuration.typeshed, "/root/some/VERSION/typeshed") json_load.side_effect = [ { "binary": "~/some/%V/pyre.bin", "typeshed": "~/some/%V/typeshed", "version": "VERSION", }, {}, ] configuration = Configuration() self.assertEqual(configuration.binary, "/home/user/some/VERSION/pyre.bin") self.assertEqual(configuration.typeshed, "/home/user/some/VERSION/typeshed") json_load.side_effect = [ { "ignore_all_errors": ["abc/def", "/abc/def", 
"~/abc/def"] }, {}, ] configuration = Configuration() self.assertEqual( configuration.ignore_all_errors, ["/root/abc/def", "/abc/def", "/home/user/abc/def"], ) json_load.side_effect = [ { "taint_models_path": ".pyre/taint_models", "search_path": "simple_string/", "version": "VERSION", "typeshed": "/TYPE/%V/SHED/", }, {}, ] configuration = Configuration() self.assertEqual(configuration.typeshed, "/TYPE/VERSION/SHED/") self.assertEqual(configuration.search_path, ["simple_string/"]) self.assertEqual(configuration.taint_models_path, ["/root/.pyre/taint_models"]) json_load.side_effect = [ { "taint_models_path": ".pyre/taint_models" }, { "search_path": "simple_string/", "version": "VERSION", "typeshed": "/TYPE/%V/SHED/", }, ] configuration = Configuration( local_configuration="/root/local/.pyre_configuration.local") self.assertEqual(configuration.typeshed, "/TYPE/VERSION/SHED/") self.assertEqual(configuration.search_path, ["simple_string/"]) self.assertEqual(configuration.taint_models_path, ["/root/local/.pyre/taint_models"]) json_load.side_effect = [ { "taint_models_path": ".pyre/taint_models" }, { "search_path": "simple_string/", "version": "VERSION", "taint_models_path": "global/taint_models", "typeshed": "/TYPE/%V/SHED/", }, ] configuration = Configuration( local_configuration="/root/local/.pyre_configuration.local") self.assertEqual(configuration.typeshed, "/TYPE/VERSION/SHED/") self.assertEqual(configuration.search_path, ["simple_string/"]) self.assertEqual( configuration.taint_models_path, [ "/root/local/.pyre/taint_models", "/root/global/taint_models" ], ) json_load.side_effect = [ { "search_path": "simple_string/", "version": "VERSION", "typeshed": "/TYPE/%V/SHED/", "saved_state": "some_name", }, {}, ] configuration = Configuration() self.assertEqual(configuration.typeshed, "/TYPE/VERSION/SHED/") self.assertEqual(configuration.search_path, ["simple_string/"]) self.assertEqual(configuration.file_hash, "HASH") json_load.side_effect = [ { "search_path": [ 
"~/simple", { "root": "~/simple", "subdirectory": "subdir" }, ], "typeshed": "~/typeshed", "source_directories": ["a", "~/b"], "binary": "~/bin", }, {}, ] configuration = Configuration() self.assertEqual(configuration.search_path, ["/home/user/simple", "/home/user/simple$subdir"]) self.assertEqual(configuration.typeshed, "/home/user/typeshed") self.assertEqual(configuration.source_directories, ["a", "/home/user/b"]) self.assertEqual(configuration.binary, "/home/user/bin") # Test loading of additional directories in the search path # via environment $PYTHONPATH. json_load.side_effect = [ { "search_path": ["json/", "file/"], "typeshed": "/TYPESHED/" }, {}, ] with patch.object(os, "getenv", return_value="additional/:directories/"): with patch.object(os.path, "isdir", return_value=True): configuration = Configuration( search_path=["command/", "line/"], preserve_pythonpath=True) self.assertEqual(configuration.typeshed, "/TYPESHED/") self.assertEqual( configuration.search_path, [ SearchPathElement("additional/"), SearchPathElement("directories/"), *[ SearchPathElement(i) for i in sys.path if os.path.isdir(i) ], SearchPathElement("command/"), SearchPathElement("line/"), SearchPathElement("json/"), SearchPathElement("file/"), ], ) # Test case where we ignore the PYTHONPATH environment variable. json_load.side_effect = [ { "search_path": ["json/", "file/"], "typeshed": "/TYPESHED/" }, {}, ] with patch.object(os, "getenv", return_value="additional/:directories/"): with patch.object(os.path, "isdir", return_value=True): configuration = Configuration( search_path=["command/", "line/"], preserve_pythonpath=False) self.assertEqual(configuration.typeshed, "/TYPESHED/") self.assertEqual(configuration.search_path, ["command/", "line/", "json/", "file/"]) # Test manual loading of the binary json_load.side_effect = [{}, {}] configuration = Configuration(binary="some/file/path/") self.assertEqual(configuration.binary, "some/file/path/") # Test manual loading of typeshed directory. 
json_load.side_effect = [{}, {}] configuration = Configuration(typeshed="some/directory/path/") self.assertEqual(configuration.typeshed, "some/directory/path/") json_load.side_effect = [{"binary": "/binary"}, {}] configuration = Configuration() self.assertEqual(configuration.binary, "/binary") json_load.side_effect = [{ "version": "VERSION", "binary": "/%V/binary" }, {}] configuration = Configuration() self.assertEqual(configuration.binary, "/VERSION/binary") # Test version override with patch.object(os, "getenv", return_value="VERSION_HASH"): json_load.side_effect = [{}, {}] configuration = Configuration() self.assertEqual(configuration.version_hash, "VERSION_HASH") with patch.object(os, "getenv", return_value="VERSION_HASH"): json_load.side_effect = [ { "version": "NOT_THIS_VERSION", "typeshed": "/TYPE/%V/SHED/" }, {}, ] configuration = Configuration() self.assertEqual(configuration.typeshed, "/TYPE/VERSION_HASH/SHED/") # Test buck builder fields json_load.side_effect = [{"use_buck_builder": True}, {}] configuration = Configuration() self.assertTrue(configuration.use_buck_builder) json_load.side_effect = [{"use_buck_builder": False}, {}] configuration = Configuration() self.assertFalse(configuration.use_buck_builder) json_load.side_effect = [{}, {}] configuration = Configuration() self.assertFalse(configuration.use_buck_builder) # Test multiple definitions of the ignore_all_errors files. json_load.side_effect = [ { "ignore_all_errors": ["buck-out/dev/gen"] }, { "ignore_all_errors": ["buck-out/dev/gen2"] }, ] configuration = Configuration() self.assertEqual(configuration.ignore_all_errors, ["buck-out/dev/gen"]) # Normalize number of workers if zero. 
json_load.side_effect = [{"typeshed": "/TYPESHED/", "workers": 0}, {}] configuration = Configuration() self.assertEqual(configuration.typeshed, "/TYPESHED/") self.assertEqual(configuration.number_of_workers, number_of_workers()) # Test excludes json_load.side_effect = [{"exclude": "regexp"}, {}] configuration = Configuration() self.assertEqual(configuration.excludes, ["regexp"]) json_load.side_effect = [{"exclude": ["regexp1", "regexp2"]}, {}] configuration = Configuration() self.assertEqual(configuration.excludes, ["regexp1", "regexp2"]) json_load.side_effect = [{"exclude": ["regexp1", "regexp2"]}, {}] configuration = Configuration(excludes=["regexp3", "regexp4"]) self.assertEqual(configuration.excludes, ["regexp3", "regexp4", "regexp1", "regexp2"]) # Test extensions json_load.side_effect = [{"extensions": [".a", ".b"]}, {}] configuration = Configuration() self.assertEqual(configuration.extensions, [".a", ".b"])
# # This toplevel file is a little messy at the moment... (2001-06-22) # ...still messy (2013-07-12) import os import site coverage = None proc_cmdline = open("/proc/cmdline", "r").read() proc_cmdline = proc_cmdline.split() if ("inst.debug=1" in proc_cmdline) or ("inst.debug" in proc_cmdline): import coverage pyanaconda_dir = "pyanaconda" for sitepkg in site.getsitepackages(): possible_dir = os.path.join(sitepkg, "pyanaconda") if os.path.isdir(possible_dir): pyanaconda_dir = possible_dir break cov = coverage.coverage(data_file="/mnt/sysimage/root/anaconda.coverage", branch=True, source=["/usr/sbin/anaconda", pyanaconda_dir]) cov.start() import atexit, sys, time, signal import pid def exitHandler(rebootData, storage): # Clear the list of watched PIDs.
def run_cstm_cmd(self):
    """Install a ``nanopype.pth`` file into site-packages that prepends the
    configured tools directory to ``PATH`` at interpreter start-up.

    No-op when no tools directory is configured.
    """
    # append binary folder to PYTHON PATH
    if not self.tools:
        return
    pth_path = os.path.join(site.getsitepackages()[0], 'nanopype.pth')
    pth_line = 'import os; os.environ["PATH"] = "{dir}" + os.pathsep + os.environ["PATH"]'.format(dir=os.path.abspath(self.tools))
    with open(pth_path, 'w') as fp:
        print(pth_line, file=fp)
def _get_site_packages(): try: from site import getsitepackages return set(getsitepackages()) except ImportError: return set()
import os
import platform
import site

import versioneer
from setuptools import setup, find_packages, Extension

# Normalised platform flags used by the build configuration below.
SYS_PLATFORM = platform.system().lower()
IS_LINUX = 'linux' in SYS_PLATFORM
IS_OSX = 'darwin' == SYS_PLATFORM
IS_WIN = 'windows' == SYS_PLATFORM

# Get Numpy include path without importing it
NUMPY_INC_PATHS = [
    os.path.join(r, 'numpy', 'core', 'include')
    for r in site.getsitepackages()
    if os.path.isdir(os.path.join(r, 'numpy', 'core', 'include'))
]
if len(NUMPY_INC_PATHS) == 0:
    # Nothing found on disk — fall back to importing numpy and asking it.
    try:
        import numpy as np
    except ImportError:
        raise ValueError(
            "Could not find numpy include dir and numpy not installed before build - "
            "cannot proceed with compilation of cython modules.")
    else:
        # just ask numpy for it's include dir
        NUMPY_INC_PATHS = [np.get_include()]
elif len(NUMPY_INC_PATHS) > 1:
    print("Found {} numpy include dirs: "
from glob import glob # Determine if the user has specified which paths to report coverage for is_include_arg = map(lambda x: bool(re.search('^--include=', x)), sys.argv) if True in is_include_arg: include_arg = sys.argv[is_include_arg.index(True)] include_arg = sys.argv[is_include_arg.index(True)] del sys.argv[is_include_arg.index(True)] else: include_arg = None # Uninstall old versions of GSLab-Tools re_gslab = re.compile('gslab[-_].', re.IGNORECASE) re_gencat = re.compile('gencat') package_locations = site.getsitepackages() for package_location in package_locations: try: packages = os.listdir(package_location) except OSError: continue for package in packages: if re_gslab.match(package) or re_gencat.match(package): shutil.rmtree(os.path.join(package_location, package)) # Additional build commands class TestRepo(build_py): '''Build command for running tests in repo''' def run(self): if include_arg:
from urllib.request import urlopen except ImportError: from urllib2 import urlopen ez = {} exec(urlopen('https://bootstrap.pypa.io/ez_setup.py').read(), ez) if not options.allow_site_packages: # ez_setup imports site, which adds site packages # this will remove them from the path to ensure that incompatible versions # of setuptools are not in the path import site # inside a virtualenv, there is no 'getsitepackages'. # We can't remove these reliably if hasattr(site, 'getsitepackages'): for sitepackage_path in site.getsitepackages(): sys.path[:] = [x for x in sys.path if sitepackage_path not in x] setup_args = {'to_dir': tmpeggs, 'download_delay': 0} ez['use_setuptools'](**setup_args) import setuptools import pkg_resources # This does not (always?) update the default working set. We will # do it. for path in sys.path: if path not in pkg_resources.working_set.entries: pkg_resources.working_set.add_entry(path) ###################################################################### # Install buildout
class GraphicalUserInterface(UserInterface): """This is the standard GTK+ interface we try to steer everything to using. It is suitable for use both directly and via VNC. """ def __init__(self, storage, payload, instclass, distributionText=product.distributionText, isFinal=product.isFinal, quitDialog=QuitDialog, gui_lock=None, fullscreen=False): UserInterface.__init__(self, storage, payload, instclass) self._actions = [] self._currentAction = None self._ui = None self._gui_lock = gui_lock self.data = None self.mainWindow = MainWindow(fullscreen=fullscreen) self._distributionText = distributionText self._isFinal = isFinal self._quitDialog = quitDialog self._mehInterface = GraphicalExceptionHandlingIface( self.mainWindow.lightbox_on) ANACONDA_WINDOW_GROUP.add_window(self.mainWindow) basemask = "pyanaconda.ui" basepath = os.path.dirname(os.path.dirname(__file__)) updatepath = "/tmp/updates/pyanaconda/ui" sitepackages = [ os.path.join(dir, "pyanaconda", "ui") for dir in site.getsitepackages() ] pathlist = set([updatepath, basepath] + sitepackages) _categories = [] _spokes = [] _hubs = [] # as list comprehension can't reference class level variables in Python 3 we # need to use a for cycle (http://bugs.python.org/issue21161) for path in pathlist: _categories.append( (basemask + ".categories.%s", os.path.join(path, "categories"))) _spokes.append( (basemask + ".gui.spokes.%s", os.path.join(path, "gui/spokes"))) _hubs.append( (basemask + ".gui.hubs.%s", os.path.join(path, "gui/hubs"))) paths = UserInterface.paths + { "categories": _categories, "spokes": _spokes, "hubs": _hubs, } def _widgetScale(self): # First, check if the GDK_SCALE environment variable is already set. If so, # leave it alone. 
if "GDK_SCALE" in os.environ: log.debug("GDK_SCALE already set to %s, not scaling", os.environ["GDK_SCALE"]) return # Next, check if a scaling factor is already being applied via XSETTINGS, # such as by gnome-settings-daemon display = Gdk.Display.get_default() screen = display.get_default_screen() val = GObject.Value() val.init(GObject.TYPE_INT) if screen.get_setting("gdk-window-scaling-factor", val): log.debug("Window scale set to %s by XSETTINGS, not scaling", val.get_int()) return # Get the primary monitor dimensions in pixels and mm from Gdk primary = screen.get_primary_monitor() monitor_geometry = screen.get_monitor_geometry(primary) monitor_scale = screen.get_monitor_scale_factor(primary) monitor_width_mm = screen.get_monitor_width_mm(primary) monitor_height_mm = screen.get_monitor_height_mm(primary) # Sometimes gdk returns 0 for physical widths and heights if monitor_height_mm == 0 or monitor_width_mm == 0: return # Check if this monitor is high DPI, using heuristics from gnome-settings-dpi. 
# If the monitor has a height >= 1200 pixels and a resolution > 192 dpi in both # x and y directions, apply a scaling factor of 2 so that anaconda isn't all tiny monitor_width_px = monitor_geometry.width * monitor_scale monitor_height_px = monitor_geometry.height * monitor_scale monitor_dpi_x = monitor_width_px / (monitor_width_mm / 25.4) monitor_dpi_y = monitor_height_px / (monitor_height_mm / 25.4) log.debug("Detected primary monitor: %dx%d %ddpix %ddpiy", monitor_width_px, monitor_height_px, monitor_dpi_x, monitor_dpi_y) if monitor_height_px >= 1200 and monitor_dpi_x > 192 and monitor_dpi_y > 192: display.set_window_scale(2) # Export the scale so that Gtk programs launched by anaconda are also scaled iutil.setenv("GDK_SCALE", "2") def _convertSignals(self): # What tends to happen when we receive a signal is that the signal will # be received by the python interpreter's C handler, python will do # what it needs to do to set the python handler we registered to run, # the C handler returns, and then nothing happens because Gtk is # holding the global interpreter lock. The signal then gets delivered # to our python code when you move the mouse or something. We can get # around this by doing signals the GLib way. The conversion assumes # that none of our signal handlers care about the frame parameter, # which is generally true. # # After the unix_signal_add call, signal.getsignal will tell a half # truth: the method returned will still be called, by way of # _signal_converter, but GLib will have replaced the actual signal # handler for that signal. 
# Convert everything except SIGCHLD, because that's a different can of worms def _signal_converter(user_data): (handler, signum) = user_data handler(signum, None) for signum in (s for s in range(1, signal.NSIG) if s != signal.SIGCHLD): handler = signal.getsignal(signum) if handler and handler not in (signal.SIG_DFL, signal.SIG_IGN): # NB: if you are looking at the glib documentation you are in for # some surprises because gobject-introspection is a minefield. # g_unix_signal_add_full comes out as GLib.unix_signal_add, and # g_unix_signal_add doesn't come out at all. GLib.unix_signal_add(GLib.PRIORITY_DEFAULT, signum, _signal_converter, (handler, signum)) @property def tty_num(self): return 6 @property def meh_interface(self): return self._mehInterface def _list_hubs(self): """Return a list of Hub classes to be imported to this interface""" from pyanaconda.ui.gui.hubs.summary import SummaryHub from pyanaconda.ui.gui.hubs.progress import ProgressHub return [SummaryHub, ProgressHub] def _is_standalone(self, obj): """Is the spoke passed as obj standalone?""" from pyanaconda.ui.gui.spokes import StandaloneSpoke return isinstance(obj, StandaloneSpoke) def setup(self, data): self._actions = self.getActionClasses(self._list_hubs()) self.data = data def getActionClasses(self, hubs): """Grab all relevant standalone spokes, add them to the passed list of hubs and order the list according to the relationships between hubs and standalones.""" from pyanaconda.ui.gui.spokes import StandaloneSpoke # First, grab a list of all the standalone spokes. standalones = self._collectActionClasses(self.paths["spokes"], StandaloneSpoke) # Second, order them according to their relationship return self._orderActionClasses(standalones, hubs) def _instantiateAction(self, actionClass): # Instantiate an action on-demand, passing the arguments defining our # spoke API and setting up continue/quit signal handlers. 
obj = actionClass(self.data, self.storage, self.payload, self.instclass) # set spoke search paths in Hubs if hasattr(obj, "set_path"): obj.set_path("spokes", self.paths["spokes"]) obj.set_path("categories", self.paths["categories"]) # If we are doing a kickstart install, some standalone spokes # could already be filled out. In that case, we do not want # to display them. if self._is_standalone(obj) and obj.completed: del (obj) return None # Use connect_after so classes can add actions before we change screens obj.window.connect_after("continue-clicked", self._on_continue_clicked) obj.window.connect_after("help-button-clicked", self._on_help_clicked, obj) obj.window.connect_after("quit-clicked", self._on_quit_clicked) return obj def run(self): (success, _args) = Gtk.init_check(None) if not success: raise RuntimeError("Failed to initialize Gtk") # Check if the GUI lock has already been taken if self._gui_lock and not self._gui_lock.acquire(False): # Gtk main loop running. That means python-meh caught exception # and runs its main loop. Do not crash Gtk by running another one # from a different thread and just wait until python-meh is # finished, then quit. unbusyCursor() log.error("Unhandled exception caught, waiting for python-meh to "\ "exit") threads.threadMgr.wait_for_error_threads() sys.exit(1) try: # Apply a widget-scale to hidpi monitors self._widgetScale() while not self._currentAction: self._currentAction = self._instantiateAction(self._actions[0]) if not self._currentAction: self._actions.pop(0) if not self._actions: return self._currentAction.initialize() self._currentAction.entry_logger() self._currentAction.refresh() self._currentAction.window.set_beta(not self._isFinal) self._currentAction.window.set_property( "distribution", self._distributionText().upper()) # Set some program-wide settings. 
settings = Gtk.Settings.get_default() settings.set_property("gtk-font-name", "Cantarell") # Apply the application stylesheet css_path = os.environ.get("ANACONDA_DATA", "/usr/share/anaconda") css_path = os.path.join(css_path, "anaconda-gtk.css") provider = Gtk.CssProvider() provider.load_from_path(css_path) Gtk.StyleContext.add_provider_for_screen( Gdk.Screen.get_default(), provider, Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION) # Apply the installclass stylesheet if self.instclass.stylesheet: provider = Gtk.CssProvider() provider.load_from_path(self.instclass.stylesheet) Gtk.StyleContext.add_provider_for_screen( Gdk.Screen.get_default(), provider, STYLE_PROVIDER_PRIORITY_INSTALLCLASS) # Look for updates to the stylesheet and apply them at a higher priority for updates_dir in ("updates", "product"): updates_css = "/run/install/%s/anaconda-gtk.css" % updates_dir if os.path.exists(updates_css): provider = Gtk.CssProvider() provider.load_from_path(updates_css) Gtk.StyleContext.add_provider_for_screen( Gdk.Screen.get_default(), provider, STYLE_PROVIDER_PRIORITY_UPDATES) self.mainWindow.setCurrentAction(self._currentAction) # Do this at the last possible minute. 
unbusyCursor() # If anything went wrong before we start the Gtk main loop, release # the gui lock and re-raise the exception so that meh can take over except Exception: self._gui_lock.release() raise Gtk.main() ### ### MESSAGE HANDLING METHODS ### @gtk_action_wait def showError(self, message): dlg = ErrorDialog(None) with self.mainWindow.enlightbox(dlg.window): dlg.refresh(message) dlg.run() dlg.window.destroy() # the dialog has the only button -- "Exit installer", so just do so sys.exit(1) @gtk_action_wait def showDetailedError(self, message, details, buttons=None): from pyanaconda.ui.gui.spokes.lib.detailederror import DetailedErrorDialog buttons = buttons or [C_("GUI|Detailed Error Dialog", "_Quit")] dlg = DetailedErrorDialog(None, buttons=buttons, label=message) with self.mainWindow.enlightbox(dlg.window): dlg.refresh(details) rc = dlg.run() dlg.window.destroy() return rc @gtk_action_wait def showYesNoQuestion(self, message): dlg = Gtk.MessageDialog(flags=Gtk.DialogFlags.MODAL, message_type=Gtk.MessageType.QUESTION, buttons=Gtk.ButtonsType.NONE, message_format=message) dlg.set_decorated(False) dlg.add_buttons(C_("GUI|Yes No Dialog", "_No"), 0, C_("GUI|Yes No Dialog", "_Yes"), 1) dlg.set_default_response(1) with self.mainWindow.enlightbox(dlg): rc = dlg.run() dlg.destroy() return bool(rc) ### ### SIGNAL HANDLING METHODS ### def _on_continue_clicked(self, win, user_data=None): # Autostep needs to be triggered just before switching to the next screen # (or before quiting the installation if there are no more screens) to be consistent # in both fully automatic kickstart installation and for installation with an incomplete # kickstart. Therefore we basically "hook" the continue-clicked signal, start autostepping # and ignore any other continue-clicked signals until autostep is done. # Once autostep finishes, it emits the appropriate continue-clicked signal itself, # switching to the next screen (if any). 
if self.data.autostep.seen and self._currentAction.handles_autostep: if self._currentAction.autostepRunning: log.debug( "ignoring the continue-clicked signal - autostep is running" ) return elif not self._currentAction.autostepDone: self._currentAction.autostep() return if not win.get_may_continue() or win != self._currentAction.window: return # The continue button may still be clickable between this handler finishing # and the next window being displayed, so turn the button off. win.set_may_continue(False) # If we're on the last screen, clicking Continue quits. if len(self._actions) == 1: # save the screenshots to the installed system before killing Anaconda # (the kickstart post scripts run to early, so we need to copy the screenshots now) iutil.save_screenshots() Gtk.main_quit() return nextAction = None ndx = 0 # If the current action wants us to jump to an arbitrary point ahead, # look for where that is now. if self._currentAction.skipTo: found = False for ndx in range(1, len(self._actions)): if self._actions[ ndx].__class__.__name__ == self._currentAction.skipTo: found = True break # If we found the point in question, compose a new actions list # consisting of the current action, the one to jump to, and all # the ones after. That means the rest of the code below doesn't # have to change. if found: self._actions = [self._actions[0]] + self._actions[ndx:] # _instantiateAction returns None for actions that should not be # displayed (because they're already completed, for instance) so skip # them here. 
while not nextAction: nextAction = self._instantiateAction(self._actions[1]) if not nextAction: self._actions.pop(1) if not self._actions: sys.exit(0) return nextAction.initialize() nextAction.window.set_beta(self._currentAction.window.get_beta()) nextAction.window.set_property("distribution", self._distributionText().upper()) if not nextAction.showable: self._currentAction.window.hide() self._actions.pop(0) self._on_continue_clicked(nextAction) return self._currentAction.exit_logger() nextAction.entry_logger() nextAction.refresh() # Do this last. Setting up curAction could take a while, and we want # to leave something on the screen while we work. self.mainWindow.setCurrentAction(nextAction) self._currentAction = nextAction self._actions.pop(0) def _on_help_clicked(self, window, obj): # the help button has been clicked, start the yelp viewer with # content for the current screen ihelp.start_yelp(ihelp.get_help_path(obj.helpFile, self.instclass)) def _on_quit_clicked(self, win, userData=None): if not win.get_quit_button(): return dialog = self._quitDialog(None) with self.mainWindow.enlightbox(dialog.window): rc = dialog.run() dialog.window.destroy() if rc == 1: self._currentAction.exit_logger() iutil.ipmi_abort(scripts=self.data.scripts) sys.exit(0)
# Scratch/demo script exploring a few numpy behaviors.  Each experiment
# ends in `assert (False)` to halt execution deliberately, so everything
# after the FIRST assert is unreachable when the script is run.
import site
import numpy as np
import math
import matplotlib.pyplot as plt

# site packages
print(site.getsitepackages())

# array -> dictionary: keys are index tuples, values are the elements.
A = ['one', 'two', 'three']
hashmap = dict(np.ndenumerate(A))

# 1d vector insertion: (1,3) reshaped to a (3,1) column, then a -999 row
# prepended along axis 0.
a = np.array([[1, 2, 3]]).reshape(3, 1)
print(a)
a = np.insert(a, 0, -999, axis=0)
print(a)
# Intentional stop — halts the script here.
assert (False)

# array vs 1d vector (unreachable: the assert above always fires).
a = np.array([1, 2, 3])
print(a.shape)
a = np.array([[1, 2, 3]])
print(a)
a = np.array([1, 2, 3]).reshape(3, 1)
print(a)
assert (False)
def main():
    """Run list_files() over every global site-packages directory."""
    directories = site.getsitepackages()
    for directory in directories:
        list_files(directory)
<option label="Debug(Attach by ptvsd)" value="ptvsd"/> <option label="Debug(Attach by rpdb)" value="rpdb"/> </options> </param> <param field="Mode2" label="Repeat Time(s)" width="30px" required="true" default="30"/> <param field="Address" label="IP" width="100px" required="true"/> <param field="Mode3" label="Token" width="250px" required="true"/> </params> </plugin> """ # Fix import of libs installed with pip as PluginSystem has a wierd pythonpath... import os import sys import site for mp in site.getsitepackages(): sys.path.append(mp) import Domoticz import miio import functools class Heartbeat(): def __init__(self, interval): self.callback = None self.count = 0 # stage interval self.seek = 0 self.interval = 10
def bootstrap(topsrcdir): # Ensure we are running Python 2.7 or 3.5+. We put this check here so we # generate a user-friendly error message rather than a cryptic stack trace # on module import. major = sys.version_info[:2][0] if sys.version_info < (3, 6): print("Python 3.6+ is required to run mach.") print("You are running Python", platform.python_version()) sys.exit(1) # This directory was deleted in bug 1666345, but there may be some ignored # files here. We can safely just delete it for the user so they don't have # to clean the repo themselves. deleted_dir = os.path.join(topsrcdir, "third_party", "python", "psutil") if os.path.exists(deleted_dir): shutil.rmtree(deleted_dir, ignore_errors=True) if major == 3 and sys.prefix == sys.base_prefix: # We are not in a virtualenv. Remove global site packages # from sys.path. # Note that we don't ever invoke mach from a Python 2 virtualenv, # and "sys.base_prefix" doesn't exist before Python 3.3, so we # guard with the "major == 3" check. site_paths = set(site.getsitepackages() + [site.getusersitepackages()]) sys.path = [path for path in sys.path if path not in site_paths] # Global build system and mach state is stored in a central directory. By # default, this is ~/.mozbuild. However, it can be defined via an # environment variable. We detect first run (by lack of this directory # existing) and notify the user that it will be created. The logic for # creation is much simpler for the "advanced" environment variable use # case. For default behavior, we educate users and give them an opportunity # to react. We always exit after creating the directory because users don't # like surprises. sys.path[0:0] = mach_sys_path(topsrcdir) import mach.base import mach.main from mach.util import setenv from mozboot.util import get_state_dir # Set a reasonable limit to the number of open files. 
# # Some linux systems set `ulimit -n` to a very high number, which works # well for systems that run servers, but this setting causes performance # problems when programs close file descriptors before forking, like # Python's `subprocess.Popen(..., close_fds=True)` (close_fds=True is the # default in Python 3), or Rust's stdlib. In some cases, Firefox does the # same thing when spawning processes. We would prefer to lower this limit # to avoid such performance problems; processes spawned by `mach` will # inherit the limit set here. # # The Firefox build defaults the soft limit to 1024, except for builds that # do LTO, where the soft limit is 8192. We're going to default to the # latter, since people do occasionally do LTO builds on their local # machines, and requiring them to discover another magical setting after # setting up an LTO build in the first place doesn't seem good. # # This code mimics the code in taskcluster/scripts/run-task. try: import resource # Keep the hard limit the same, though, allowing processes to change # their soft limit if they need to (Firefox does, for instance). (soft, hard) = resource.getrlimit(resource.RLIMIT_NOFILE) # Permit people to override our default limit if necessary via # MOZ_LIMIT_NOFILE, which is the same variable `run-task` uses. limit = os.environ.get("MOZ_LIMIT_NOFILE") if limit: limit = int(limit) else: # If no explicit limit is given, use our default if it's less than # the current soft limit. For instance, the default on macOS is # 256, so we'd pick that rather than our default. limit = min(soft, 8192) # Now apply the limit, if it's different from the original one. if limit != soft: resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard)) except ImportError: # The resource module is UNIX only. pass from mozbuild.util import patch_main patch_main() def resolve_repository(): import mozversioncontrol try: # This API doesn't respect the vcs binary choices from configure. 
# If we ever need to use the VCS binary here, consider something # more robust. return mozversioncontrol.get_repository_object(path=topsrcdir) except (mozversioncontrol.InvalidRepoPath, mozversioncontrol.MissingVCSTool): return None def pre_dispatch_handler(context, handler, args): # If --disable-tests flag was enabled in the mozconfig used to compile # the build, tests will be disabled. Instead of trying to run # nonexistent tests then reporting a failure, this will prevent mach # from progressing beyond this point. if handler.category == "testing" and not handler.ok_if_tests_disabled: from mozbuild.base import BuildEnvironmentNotFoundException try: from mozbuild.base import MozbuildObject # all environments should have an instance of build object. build = MozbuildObject.from_environment() if build is not None and hasattr(build, "mozconfig"): ac_options = build.mozconfig["configure_args"] if ac_options and "--disable-tests" in ac_options: print( "Tests have been disabled by mozconfig with the flag " + '"ac_add_options --disable-tests".\n' + "Remove the flag, and re-compile to enable tests.") sys.exit(1) except BuildEnvironmentNotFoundException: # likely automation environment, so do nothing. pass def post_dispatch_handler(context, handler, instance, success, start_time, end_time, depth, args): """Perform global operations after command dispatch. For now, we will use this to handle build system telemetry. """ # Don't finalize telemetry data if this mach command was invoked as part of # another mach command. 
if depth != 1: return _finalize_telemetry_glean(context.telemetry, handler.name == "bootstrap", success) def populate_context(key=None): if key is None: return if key == "state_dir": state_dir = get_state_dir() if state_dir == os.environ.get("MOZBUILD_STATE_PATH"): if not os.path.exists(state_dir): print( "Creating global state directory from environment variable: %s" % state_dir) os.makedirs(state_dir, mode=0o770) else: if not os.path.exists(state_dir): if not os.environ.get("MOZ_AUTOMATION"): print(STATE_DIR_FIRST_RUN.format(userdir=state_dir)) try: sys.stdin.readline() except KeyboardInterrupt: sys.exit(1) print("\nCreating default state directory: %s" % state_dir) os.makedirs(state_dir, mode=0o770) return state_dir if key == "local_state_dir": return get_state_dir(srcdir=True) if key == "topdir": return topsrcdir if key == "pre_dispatch_handler": return pre_dispatch_handler if key == "post_dispatch_handler": return post_dispatch_handler if key == "repository": return resolve_repository() raise AttributeError(key) # Note which process is top-level so that recursive mach invocations can avoid writing # telemetry data. if "MACH_MAIN_PID" not in os.environ: setenv("MACH_MAIN_PID", str(os.getpid())) driver = mach.main.Mach(os.getcwd()) driver.populate_context_handler = populate_context if not driver.settings_paths: # default global machrc location driver.settings_paths.append(get_state_dir()) # always load local repository configuration driver.settings_paths.append(topsrcdir) for category, meta in CATEGORIES.items(): driver.define_category(category, meta["short"], meta["long"], meta["priority"]) # Sparse checkouts may not have all mach_commands.py files. Ignore # errors from missing files. Same for spidermonkey tarballs. 
repo = resolve_repository() missing_ok = (repo is not None and repo.sparse_checkout_present()) or os.path.exists( os.path.join(topsrcdir, "INSTALL")) for path in MACH_MODULES: try: driver.load_commands_from_file(os.path.join(topsrcdir, path)) except mach.base.MissingFileError: if not missing_ok: raise return driver
# Module-level init for the Pi-Plates DIGI driver: configure the GPIO
# handshake lines, open the SPI device, and set up shared module state.
# NOTE(review): fragment starts mid-file — ppFRAME and ppACK (and the
# GPIO/time/spidev/threading imports) are defined earlier, outside this
# excerpt.
ppSRQ = 22

GPIO.setup(ppFRAME, GPIO.OUT)
GPIO.output(ppFRAME, False)  # Initialize FRAME signal
time.sleep(.001)             # let Pi-Plate reset SPI engine if necessary
GPIO.setup(ppACK, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(ppSRQ, GPIO.IN, pull_up_down=GPIO.PUD_UP)

# NOTE(review): bare except — swallows every error opening the SPI
# device and only prints advice; `spi` stays undefined on failure, so
# later use would raise NameError.
try:
    spi = spidev.SpiDev()
    spi.open(0, 1)
except:
    print("Did you enable the SPI hardware interface on your Raspberry Pi?")
    print("Go to https://pi-plates.com/getting_started/ and learn how.")

# Help text lives inside the installed piplates package directory.
localPath = site.getsitepackages()[0]
helpPath = localPath + '/piplates/DIGIhelp.txt'
# helpPath='DIGIhelp.txt'   # for development only

DIGIversion = 1.0
# Version 1.0   - initial release

RMAX = 2000        # NOTE(review): meaning not shown in this excerpt — confirm
MAXADDR = 8        # maximum number of addressable DIGI plates
digisPresent = list(range(8))
DataGood = False
# Lock serializing SPI transactions; acquired immediately at import time.
lock = threading.Lock()
lock.acquire()

#==============================================================================#
# HELP Functions                                                               #
def site_packages():
    """Return the first global site-packages directory.

    Returns None (implicitly, as in the original contract) when the
    interpreter reports no global site-packages directories at all.
    """
    candidates = site.getsitepackages()
    if not candidates:
        return None
    return candidates[0]
def install():
    """Interactively install the labscript suite into a chosen folder.

    Builds first if needed, writes a labscript_suite.pth file into
    site-packages so the install directory is importable, copies the
    source tree into place, rewrites setup.py as uninstall.py, and on
    Windows creates start-menu shortcuts.

    NOTE(review): depends on many module-level helpers/constants defined
    elsewhere in this file (check_dependencies, IS_BUILD, build, getinput,
    default_install_folder, IS_LABSCRIPT_SUITE, yn_choice, uninstall,
    this_folder, escalated_privileges, exclude_from_copying,
    do_not_delete, README, DEPENDENCIES, make_labconfig_file, mkdir_p,
    gui_programs) — not visible in this excerpt.
    """
    check_dependencies()
    if not os.path.exists(IS_BUILD):
        build()
    install_folder = getinput('\nEnter custom installation directory or press enter', default_install_folder)
    install_folder = os.path.abspath(install_folder)
    # An existing labscript install at the target requires confirmation,
    # then an uninstall, before proceeding.
    if os.path.exists(install_folder) and os.path.exists(os.path.join(install_folder, IS_LABSCRIPT_SUITE)):
        if not yn_choice('\nReplace existing installation? in %s? ' % install_folder +
                         'userlib and configuration ' +
                         'will be kept, but backing them up is recommended.', default='n'):
            print('Cancelled')
            sys.exit(1)
        uninstall(confirm=False)
    os.chdir(this_folder)

    # Add libs to python's search path:
    site_packages_dir = site.getsitepackages()[0]
    pth_file = os.path.join(site_packages_dir, 'labscript_suite.pth')
    print('Adding to Python search path (%s)' % pth_file)
    # temporarily escalate privileges so we can create the .pth file:
    with escalated_privileges():
        with open(pth_file, 'w') as f:
            f.write(install_folder + '\n')
            f.write(os.path.join(install_folder, 'userlib') + '\n')
            f.write(os.path.join(install_folder, 'userlib', 'pythonlib') + '\n')

    # Prepend the install directory to sys.path, so that upcoming
    # labscript_utils imports use the installed module and not the
    # temporary downloaded copy in this working directory.  This is
    # important for submodules that introspect their paths, like
    # labscript_utils.winshell.
    sys.path.insert(0, install_folder)
    # Unload any previously imported copies of labscript_utils:
    for item in sys.modules.copy():
        if item.startswith('labscript_utils'):
            del sys.modules[item]
    print('Copying files')
    if not os.path.exists(install_folder):
        try:
            os.mkdir(install_folder)
        except OSError as e:
            # NOTE(review): message reads "create to install" — typo in
            # the original; left unchanged here.
            sys.stderr.write('Could not create to install directory:\n %s' % str(e))
            sys.exit(1)
    try:
        # Add file that marks this as a labscript suite install dir:
        with open(os.path.join(install_folder, IS_LABSCRIPT_SUITE), 'w'):
            pass
        for entry in os.listdir('.'):
            if not exclude_from_copying(entry):
                if os.path.isdir(entry):
                    dest = os.path.join(install_folder, entry)
                    copy = shutil.copytree
                else:
                    dest = install_folder
                    copy = shutil.copy
                # Entries in do_not_delete are only copied on first
                # install; an existing copy is preserved.
                if entry in do_not_delete:
                    if os.path.exists(dest):
                        continue
                copy(entry, dest)
    except OSError as e:
        sys.stderr.write('Could not write to install directory:\n %s' % str(e))
        sys.exit(1)
    # Rename setup.py to uninstall.py, as it will function only as an
    # uninstaller from within the labscript install directory:
    shutil.move(os.path.join(install_folder, 'setup.py'), os.path.join(install_folder, 'uninstall.py'))
    # Replace the readme file with one with instructions for uninstalling only
    os.unlink(os.path.join(install_folder, README))
    with open(os.path.join(install_folder, 'README_uninstall.txt'), 'w') as f:
        f.write('To uninstall, run: \n\n' +
                '    python uninstall.py\n\n' +
                'in this directory.\n' +
                'userlib and configuration ' +
                'will be kept, but backing them up is recommended.\n')
    # Remove the dependencies.txt file:
    os.unlink(os.path.join(install_folder, DEPENDENCIES))
    # Reload the site module so later code sees these paths:
    # NOTE(review): bare reload() is Python 2 — Python 3 needs
    # importlib.reload; confirm the targeted interpreter.
    reload(site)
    make_labconfig_file(install_folder)
    mkdir_p(os.path.join(install_folder, 'userlib', 'app_saved_configs'))
    if os.name == 'nt':
        print('adding application shortcuts')
        # TODO make this work on linux!
    # NOTE(review): the repeated os.name check below is reconstructed as
    # a sibling block — confirm against the original file's layout.
    if os.name == 'nt':
        from labscript_utils.winshell import make_shortcut, add_to_start_menu
        for appname in gui_programs:
            shortcut_path = make_shortcut(appname)
            add_to_start_menu(shortcut_path)
        # Clear the icon cache so Windows gets the shortcut icons right
        # even if they were previously broken:
        if not (struct.calcsize("P") == 8) and (platform.machine().endswith('64')):
            # 64-bit windows auto-redirects 32-bit python calls away from system32
            # have to use full path with emulator re-direct
            exe = os.path.join(os.environ['WINDIR'],'sysnative','ie4uinit.exe')
        else:
            exe = 'ie4uinit.exe'
        try:
            subprocess.Popen([exe, '-ClearIconCache'])
        except Exception:
            sys.stderr.write('failed to clear icon cache, icons might be blank\n')
    print('done')
# Extract all of the Python packages for OpenMOC # (ie, openmoc.log, openmoc.materialize, etc) packages = config.packages, # Include SWIG interface files in package - this is important for PyPI package_data = {'' : ['*.i*']}, # Inject our custom compiler and linker triggers cmdclass={ 'build_ext': custom_build_ext, 'install': custom_install} ) # Rerun the build_py to setup links for C++ extension modules created by SWIG # This prevents us from having to install twice build_py = build_py(dist) build_py.ensure_finalized() build_py.run() # Remove the shared library in the site packages if "clean" in sys.argv: install_location = site.getsitepackages()[0] print("Removing build from " + install_location) os.system("rm -rf " + install_location + "/*openmoc*") install_location = site.getusersitepackages() print("Removing build from " + install_location) os.system("rm -rf " + install_location + "/*openmoc*") install_location = "./tests/" print("Removing build from " + install_location) os.system("rm -rf " + install_location + "openmoc " + install_location + "build")