def TestPlatForm():
    # Print basic OS/interpreter facts via the `platform` module (Python 2 print syntax).
    print "--------------Operation System------------"
    # (bits, linkage) tuple, e.g. ('64bit', 'WindowsPE')
    print platform.architecture()
    # full platform string, e.g. 'Linux-4.15.0-...-x86_64-with-...'
    print platform.platform()
    # OS name: 'Windows', 'Linux', 'Darwin', ...
    print platform.system()
    # interpreter version string, e.g. '2.7.18'
    print platform.python_version()
def print_platform_info():
    """Write a detailed interpreter/platform/hardware report to the debug log."""
    import platform
    report_lines = [
        '*************** PLATFORM INFORMATION ************************',
        '==Interpreter==',
        'Version :' + platform.python_version(),
        'Version tuple:' + str(platform.python_version_tuple()),
        'Compiler :' + platform.python_compiler(),
        'Build :' + str(platform.python_build()),
        '==Platform==',
        'Normal :' + platform.platform(),
        'Aliased:' + platform.platform(aliased=True),
        'Terse :' + platform.platform(terse=True),
        '==Operating System and Hardware Info==',
        'uname:' + str(platform.uname()),
        'system :' + platform.system(),
        'node :' + platform.node(),
        'release :' + platform.release(),
        'version :' + platform.version(),
        'machine :' + platform.machine(),
        'processor:' + platform.processor(),
        '==Executable Architecture==',
        'interpreter:' + str(platform.architecture()),
        '/bin/ls :' + str(platform.architecture('/bin/ls')),
        '*******************************************************',
    ]
    for report_line in report_lines:
        logging.debug(report_line)
def collect_job():
    # Build a monitoring report (OS, load average, disk usage, network I/O,
    # process/user counts, uptime) and POST it to the configured collect endpoint.
    config = utils.get_config()
    disks = config[utils.DISK_SECTION]
    interfaces = config[utils.INET_SECTION]
    account = Account(config[utils.GENERAL_SECTION].get('email'),
                      config[utils.GENERAL_SECTION].get('user_key'),
                      config[utils.GENERAL_SECTION].get('api_key'))
    report = {}
    usage = {}
    net = {}
    if os.name == 'nt':
        # Windows: "<system>-<release> <service-pack>"
        report['os'] = platform.system()+"-"+platform.win32_ver()[0]+" "+platform.win32_ver()[2]
        report['arch'] = platform.architecture()[0]
    else:
        report['loadAverage'] = {}
        if not os.name == 'nt':  # always true inside this else branch
            for idx, la in enumerate(os.getloadavg()):
                # NOTE(review): maps index 2 -> "5" and index 1 -> "15";
                # looks like idx == 1 was intended for the 5-minute average -- confirm.
                time_la = "1" if idx == 0 else "5" if idx == 2 else "15"
                report['loadAverage'][time_la] = "{0:.2f}".format(la)
        if platform.system() == 'Linux':
            report['os'] = platform.linux_distribution()[0]+"-"+platform.linux_distribution()[1]+" "+platform.linux_distribution()[2]
            report['arch'] = platform.architecture()[0]
        else:
            report['os'] = "Mac OS X - "+platform.mac_ver()[0]
            report['arch'] = platform.architecture()[0]
    # Disk usage only for disks enabled in config and actually present.
    for disk in disks.keys():
        if disks[disk] == utils.ENABLED and check_disk(disk):
            usage_temp = psutil.disk_usage(disk)
            usage[disk] = {'total': usage_temp.total, 'used': usage_temp.used, 'free': usage_temp.free, 'percentage': usage_temp.percent}
    # Per-NIC byte counters; keys lowered to match configured interface names
    # (Python 2 iteritems()).
    for interf in interfaces.keys():
        if interfaces[interf] == utils.ENABLED:
            net_temp = dict((k.lower(),v) for k, v in psutil.net_io_counters(pernic=True).iteritems())[interf]
            net[interf] = {'sent': net_temp.bytes_sent, 'recv': net_temp.bytes_recv}
    report['inet'] = net
    report['disks'] = usage
    report['processes'] = {'value': len(psutil.pids())}
    # NOTE(review): this recomputes loadAverage and overwrites the value set
    # in the non-Windows branch above -- likely duplicated code.
    report['loadAverage'] = {}
    if not os.name == 'nt':
        for idx, la in enumerate(os.getloadavg()):
            time_la = "1" if idx == 0 else "5" if idx == 2 else "15"
            report['loadAverage'][time_la] = "{0:.2f}".format(la)
    report['users'] = {'value': len(psutil.users())}
    # uptime as "H:MM:SS" (fractional seconds stripped by split('.'))
    report['uptime'] = str(datetime.now() - datetime.fromtimestamp(psutil.boot_time())).split('.')[0]
    report['kindDevice'] = 3
    api_key = account.api_key
    url = "%s/%s" % \
        (system_config['ROUTES'].get('collect'), config[utils.GENERAL_SECTION].get('serial'))
    params = {'apiKey': api_key, 'data': json.dumps(report)}
    try:
        response = http.request('POST', url, params, {'user-key': account.user_key}, encode_multipart=False)
    except Exception, e:
        # Best-effort delivery: swallow network errors after notifying the user.
        console.error("Check your connection")
        return
def getConf() :
    # Read the configuration file and derive the PyInstaller download/unpack
    # names plus the input script and platform-specific output file names.
    # All results are published as module globals.
    global pyinstaller_url, pyinstaller_zip, pyinstaller_rep, input_file, script_nom, executable, output_file
    config = ConfigParser.RawConfigParser()
    config.read(os.path.join(g_path_base, g_conf_file))
    log.debug(u"{} {} = {}".format(g_conf_file, type(os.path.join(g_path_base, g_conf_file)), os.path.join(g_path_base, g_conf_file)))
    try :
        pyinstaller_url = config.get("pyinstaller", "url")
        pyinstaller_zip = os.path.basename(pyinstaller_url)
        # Unpack directory is a hidden dir ('.' prefix) named after the archive;
        # .tar.* archives need a double splitext to drop both extensions.
        if ".tar" in pyinstaller_zip :
            pyinstaller_rep = '.' + os.path.splitext(os.path.splitext(pyinstaller_zip)[0])[0]
        else :
            pyinstaller_rep = '.' + os.path.splitext(pyinstaller_zip)[0]
        input_file = os.path.basename(sys.argv[1])
        if not input_file.endswith(".py") :
            quit(1)
        script_nom = os.path.splitext(input_file)[0]
        if sys.platform == "win32" :
            executable = script_nom + ".exe"
        else :
            executable = script_nom
        # 64-bit builds get an explicit _x64 suffix; Linux builds get .run.
        if sys.platform == "win32" :
            if platform.architecture()[0] == "32bit" :
                output_file = executable
            else :
                output_file = script_nom + "_x64.exe"
        else :
            if platform.architecture()[0] == "32bit" :
                output_file = executable + ".run"
            else :
                output_file = script_nom + "_x64.run"
    except ConfigParser.Error, e :
        log.critical(u"Error in configuration file ({})".format(e))
        quit(1)
def generate_download_URL():
    """Assemble the agent download URL with OS, distribution and architecture
    query parameters appended."""
    parts = ['http://', AGENT_DOWNLOAD_URL,
             ':8080/BTSecureCloudServer/BTSecureCloud/downloadVor?',
             '&operatingSystem=', platform.system()]
    if platform.system() == 'Windows':
        parts.append('&distribution=N/A')
        # platform.architecture()[0] == '64bit' does not work if python 32bit is used
        is_64bit = (platform.architecture()[0] == '64bit'
                    or '(x86)' in os.environ['PROGRAMFILES'])
        parts.append('&architecture=64bit' if is_64bit else '&architecture=32bit')
    else:
        distribution = platform.linux_distribution()[0]
        # first matching distro name wins (mirrors the original elif chain)
        for needle, param in (('Ubuntu', '&distribution=Ubuntu'),
                              ('SUSE', '&distribution=Suse'),
                              ('Red Hat', '&distribution=Red%20hat6'),
                              ('CentOS', '&distribution=CentOS')):
            if needle in distribution:
                parts.append(param)
                break
        parts.append('&architecture=' + platform.architecture()[0])
    return ''.join(parts)
def test_compilation(cfile, compiler=None, **compiler_attrs):
    """Test simple compilation with given settings.

    Compiles *cfile* into an executable (named after the file with its
    extension stripped) using the compiler from ``get_compiler`` and returns
    the executable path.
    """
    cc = get_compiler(compiler, **compiler_attrs)
    efile, ext = os.path.splitext(cfile)

    cpreargs = lpreargs = []
    if sys.platform == 'darwin':
        # use appropriate arch for compiler
        if platform.architecture()[0] == '32bit':
            cpu = 'ppc' if platform.processor() == 'powerpc' else 'i386'
            cpreargs = ['-arch', cpu]
            lpreargs = ['-arch', cpu, '-undefined', 'dynamic_lookup']
        else:
            # allow for missing UB arch, since it will still work:
            lpreargs = ['-undefined', 'dynamic_lookup']
    if sys.platform == 'sunos5':
        lpreargs = ['-m32'] if platform.architecture()[0] == '32bit' else ['-m64']

    # BUGFIX: copy before extending -- the previous `extra += [...]` mutated
    # the caller-supplied extra_compile_args list in place.
    extra = list(compiler_attrs.get('extra_compile_args', []))
    extra.append('--std=c++11')

    objs = cc.compile([cfile], extra_preargs=cpreargs, extra_postargs=extra)
    cc.link_executable(objs, efile, extra_preargs=lpreargs)
    return efile
def calculate_resizing_tofitmemory(x_size,y_size,n_slices,byte):
    """
    Predicts the percentage (between 0 and 1) to resize the image to fit the memory,
    giving the following information:
        x_size, y_size: image size
        n_slices: number of slices
        byte: bytes allocated for each pixel sample
    """
    # Empirical factor: processing is assumed to need ~28x the raw volume size.
    imagesize = x_size * y_size * n_slices * byte * 28
    # USING LIBSIGAR
    #import sigar
    #sg = sigar.open()
    #ram_free = sg.mem().actual_free()
    #ram_total = sg.mem().total()
    #swap_free = sg.swap().free()
    #sg.close()
    # USING PSUTIL
    import psutil
    try:
        if (psutil.version_info>=(0,6,0)):
            ram_free = psutil.virtual_memory().available
            ram_total = psutil.virtual_memory().total
            swap_free = psutil.swap_memory().free
        else:
            # pre-0.6 psutil API
            ram_free = psutil.phymem_usage().free + psutil.cached_phymem() + psutil.phymem_buffers()
            ram_total = psutil.phymem_usage().total
            swap_free = psutil.virtmem_usage().free
    except:
        print "Exception! psutil version < 0.3 (not recommended)"
        ram_total = psutil.TOTAL_PHYMEM  # this is for psutil < 0.3
        ram_free = 0.8 * psutil.TOTAL_PHYMEM
        swap_free = psutil.avail_virtmem()
    print "RAM_FREE=", ram_free
    print "RAM_TOTAL=", ram_total
    # 32-bit processes cannot address all RAM: clamp to the usable window.
    if (sys.platform == 'win32'):
        if (platform.architecture()[0] == '32bit'):
            if ram_free>1400000000:
                ram_free=1400000000
            if ram_total>1400000000:
                ram_total=1400000000
    if (sys.platform == 'linux2'):
        if (platform.architecture()[0] == '32bit'):
            if ram_free>3500000000:
                ram_free=3500000000
            if ram_total>3500000000:
                ram_total=3500000000
    # Swap counted at half weight and never beyond total RAM.
    if (swap_free>ram_total):
        swap_free=ram_total
    resize = (float((ram_free+0.5*swap_free)/imagesize))
    resize=math.sqrt(resize)  # this gives the "resize" for each axis x and y
    if (resize>1):
        resize=1
    return round(resize,2)
def get_jlink_dll():
    """Locate the SEGGER J-Link shared library and return (library, backend_info)."""
    # what kind of system am I?
    import platform
    arch_bits = platform.architecture()[0]
    if arch_bits == '32bit':
        libpath = 'lib32'
    elif arch_bits == '64bit':
        libpath = 'lib64'
    else:
        libpath = ''
        raise Exception(repr(platform.architecture()))

    # start with the script path, then everything on sys.path
    script_dir = os.path.dirname(os.path.realpath(__file__))
    search_path = [os.path.join(script_dir, libpath)]
    search_path += sys.path[:]  # copy sys.path list

    # if environment variable is set, insert this path first
    try:
        search_path.insert(0, os.environ['JLINK_PATH'])
    except KeyError:
        try:
            search_path.extend(os.environ['PATH'].split(os.pathsep))
        except KeyError:
            pass

    if sys.platform == 'win32':
        jlink, backend_info = locate_library('jlinkarm.dll', search_path)
    elif sys.platform == 'linux2':
        jlink, backend_info = locate_library('libjlinkarm.so.4', search_path, ctypes.cdll)
    elif sys.platform == 'darwin':
        jlink, backend_info = locate_library('libjlinkarm.so.4.dylib', search_path, ctypes.cdll)
    return jlink, backend_info
def get_system_spec():
    """Collect information about the system and installation."""
    import pkg_resources
    import platform

    if sys.platform == 'darwin':
        system_info = 'macOS {} {}'.format(
            platform.mac_ver()[0],
            platform.architecture()[0],
        )
    else:
        # "<system> <bits_linkage> <release> <machine>"
        fields = (
            platform.system(),
            '_'.join(platform.architecture()),
            platform.release(),
            platform.machine(),
        )
        system_info = '{} {} {} {}'.format(*fields)

    return {
        'raiden': pkg_resources.require(raiden.__name__)[0].version,
        'python_implementation': platform.python_implementation(),
        'python_version': platform.python_version(),
        'system': system_info,
    }
def finalize_options(self):
    '''Overloaded build_ext implementation to append custom openssl
    include file and library linking options'''
    build_ext.build_ext.finalize_options(self)

    opensslIncludeDir = os.path.join(self.openssl, 'include')
    opensslLibraryDir = os.path.join(self.openssl, 'lib')

    self.swig_opts = ['-I%s' % i for i in self.include_dirs +
                      [opensslIncludeDir, os.path.join(opensslIncludeDir, "openssl")]]
    self.swig_opts.append('-includeall')
    self.swig_opts.append('-modern')

    # Fedora does hat tricks.
    # BUGFIX: platform.linux_distribution() was removed in Python 3.8;
    # guard so this still runs there (the Fedora/CentOS defines are skipped).
    distro_name = getattr(platform, 'linux_distribution', lambda: ('', '', ''))()[0]
    if distro_name in ['Fedora', 'CentOS']:
        if platform.architecture()[0] == '64bit':
            self.swig_opts.append('-D__x86_64__')
        elif platform.architecture()[0] == '32bit':
            self.swig_opts.append('-D__i386__')

    self.include_dirs += [os.path.join(self.openssl, opensslIncludeDir),
                          os.path.join(os.getcwd(), 'SWIG')]

    if sys.platform == 'cygwin':
        # Cygwin SHOULD work (there's code in distutils), but
        # if one first starts a Windows command prompt, then bash,
        # the distutils code does not seem to work. If you start
        # Cygwin directly, then it would work even without this change.
        # Someday distutils will be fixed and this won't be needed.
        self.library_dirs += [os.path.join(self.openssl, 'bin')]

    self.library_dirs += [os.path.join(self.openssl, opensslLibraryDir)]
def setUp(self):
    # Fetch and unpack the platform-appropriate neuroshare nsMCDLibrary
    # (Windows DLL or Linux shared object) into the temp directory and record
    # its path in self.dllname. Skips the suite on unsupported platforms (OS X).
    BaseTestIO.setUp(self)
    if sys.platform.startswith('win'):
        distantfile = 'http://download.multichannelsystems.com/download_data/software/neuroshare/nsMCDLibrary_3.7b.zip'
        localfile = os.path.join(tempfile.gettempdir(),'nsMCDLibrary_3.7b.zip')
        # download once; reuse the cached archive on later runs
        if not os.path.exists(localfile):
            urlretrieve(distantfile, localfile)
        if platform.architecture()[0].startswith('64'):
            self.dllname = os.path.join(tempfile.gettempdir(),'Matlab/Matlab-Import-Filter/Matlab_Interface/nsMCDLibrary64.dll')
            if not os.path.exists(self.dllname):
                zip = zipfile.ZipFile(localfile)
                zip.extract('Matlab/Matlab-Import-Filter/Matlab_Interface/nsMCDLibrary64.dll', path = tempfile.gettempdir())
        else:
            self.dllname = os.path.join(tempfile.gettempdir(),'Matlab/Matlab-Import-Filter/Matlab_Interface/nsMCDLibrary.dll')
            if not os.path.exists(self.dllname):
                zip = zipfile.ZipFile(localfile)
                zip.extract('Matlab/Matlab-Import-Filter/Matlab_Interface/nsMCDLibrary.dll', path = tempfile.gettempdir())
    elif sys.platform.startswith('linux'):
        # pick the 32- or 64-bit tarball to match the interpreter's bitness
        if platform.architecture()[0].startswith('64'):
            distantfile = 'http://download.multichannelsystems.com/download_data/software/neuroshare/nsMCDLibrary_Linux64_3.7b.tar.gz'
            localfile = os.path.join(tempfile.gettempdir(),'nsMCDLibrary_Linux64_3.7b.tar.gz')
        else:
            distantfile = 'http://download.multichannelsystems.com/download_data/software/neuroshare/nsMCDLibrary_Linux32_3.7b.tar.gz'
            localfile = os.path.join(tempfile.gettempdir(),'nsMCDLibrary_Linux32_3.7b.tar.gz')
        if not os.path.exists(localfile):
            urlretrieve(distantfile, localfile)
        self.dllname = os.path.join(tempfile.gettempdir(),'nsMCDLibrary/nsMCDLibrary.so')
        if not os.path.exists(self.dllname):
            tar = tarfile.open(localfile)
            tar.extract('nsMCDLibrary/nsMCDLibrary.so', path = tempfile.gettempdir())
    else:
        raise unittest.SkipTest("Not currently supported on OS X")
def ensure_32bit_environment():
    # Re-exec the current script under a 32-bit Python when running 64-bit
    # (skipped for the download_dependencies helper), forward termination
    # signals to the child, and exit with the child's status.
    if "64bit" in platform.architecture()[0] and "download_dependencies" not in sys.argv[0]:
        import signal
        if os.name == "nt":
            py = cp.get("Windows", "Python32Bit")
            print "Have to switch to 32-bit Python in %s. Visual Studio breakpoints will not work!!" % py
            print "To make them work, setup the 32-bit Python in your Visual Studio environment"
            child = subprocess.Popen([py] + sys.argv)
        elif "linux" in sys.platform:
            py = cp.get("Linux", "Python32Bit")
            print "Have to switch to 32-bit Python in %s." % py
            print "If this doesn't work, make sure you have the following 32-bit packages installed (Ubuntu 14.10 names):"
            print "\tlibffi6:i386 libffi-dev:i386 libffi-dev libc6:i386 libc6-dev-i386 libbz2-1.0:i386 libexpat1:i386 zlib1g:i386 libssl1.0.0:i386 libgcrypt11:i386 libtinfo5:i386 libsdl1.2-dev:i386 gcc-multilib"
            print "If you cannot install these, consider setting up a 32-bit chroot."
            # Force a 32-bit toolchain for any native builds done by the child.
            os.environ["CC"] = os.getenv("CC", "cc") + " -m32"
            os.environ["CFLAGS"] = os.getenv("CFLAGS", "") + " -m32"
            os.environ["PYTHONPATH"] = os.getenv("PYTHONPATH", "") + ":" + cp.get("Linux", "pygame_cffi")
            child = subprocess.Popen([py] + sys.argv)
        elif "darwin" == sys.platform and "64bit" in platform.architecture()[0]:
            # macOS: ask the system launcher to prefer the 32-bit interpreter.
            print "Trying to switch to 32-bit Python by setting VERSIONER_PYTHON_PREFER_32_BIT. You have to run with the system Python for this to work."
            os.environ["VERSIONER_PYTHON_PREFER_32_BIT"] = "yes"
            child = subprocess.Popen([sys.executable] + sys.argv)
        else:
            raise AssertionError("Unsupported platform")
        def handler(signum, frame):
            # forward the received signal to the 32-bit child process
            child.send_signal(signum)
        # Not every signal exists on every platform (e.g. SIGKILL on Windows).
        for sig in ["SIGTERM", "SIGINT", "SIGKILL"]:
            try:
                signal.signal(getattr(signal, sig), handler)
            except Exception:
                pass
        exit(child.wait())
def get_encryption_lib_path():
    """Return the filesystem path of the native encryption library matching
    the current platform and interpreter bitness.

    Raises Exception if the platform is unsupported or the library file is
    missing next to this module.
    """
    lib_path = ""
    # win32 doesn't mean necessarily 32 bits
    if sys.platform == "win32":
        if platform.architecture()[0] == '64bit':
            lib_path = os.path.join(os.path.dirname(__file__), "encrypt64bit.dll")
        else:
            lib_path = os.path.join(os.path.dirname(__file__), "encrypt32bit.dll")
    elif sys.platform == "darwin":
        lib_path = os.path.join(os.path.dirname(__file__), "libencrypt-osx-64.so")
    elif os.uname()[4].startswith("arm") and platform.architecture()[0] == '32bit':
        lib_path = os.path.join(os.path.dirname(__file__), "libencrypt-linux-arm-32.so")
    elif sys.platform.startswith('linux'):
        if platform.architecture()[0] == '64bit':
            lib_path = os.path.join(os.path.dirname(__file__), "libencrypt-linux-x86-64.so")
        else:
            lib_path = os.path.join(os.path.dirname(__file__), "libencrypt-linux-x86-32.so")
    elif sys.platform.startswith('freebsd10'):
        # BUGFIX: sys.platform is e.g. 'freebsd10' (no dash), so the previous
        # 'freebsd-10' prefix could never match.
        lib_path = os.path.join(os.path.dirname(__file__), "libencrypt-freebsd10-64.so")
    else:
        err = "Unexpected/unsupported platform '{}'".format(sys.platform)
        log.error(err)
        raise Exception(err)
    if not os.path.isfile(lib_path):
        err = "Could not find {} encryption library {}".format(sys.platform, lib_path)
        log.error(err)
        raise Exception(err)
    return lib_path
def print_platform_info():
    """Emit an interpreter/platform/hardware survey through the debug logger."""
    import platform
    for entry in (
        "*************** PLATFORM INFORMATION ************************",
        "==Interpreter==",
        "Version :" + platform.python_version(),
        "Version tuple:" + str(platform.python_version_tuple()),
        "Compiler :" + platform.python_compiler(),
        "Build :" + str(platform.python_build()),
        "==Platform==",
        "Normal :" + platform.platform(),
        "Aliased:" + platform.platform(aliased=True),
        "Terse :" + platform.platform(terse=True),
        "==Operating System and Hardware Info==",
        "uname:" + str(platform.uname()),
        "system :" + platform.system(),
        "node :" + platform.node(),
        "release :" + platform.release(),
        "version :" + platform.version(),
        "machine :" + platform.machine(),
        "processor:" + platform.processor(),
        "==Executable Architecture==",
        "interpreter:" + str(platform.architecture()),
        "/bin/ls :" + str(platform.architecture("/bin/ls")),
        "*******************************************************",
    ):
        logging.debug(entry)
def __init__(self):
    """Load the native CPA acceleration library matching OS and bitness,
    binding its oneSubkey entry point."""
    self.clearStats()
    dir = os.path.dirname(__file__)

    # Determine correct library to load for 64-bit vs. 32-bit.
    # BUGFIX: platform.architecture() returns a tuple like ('64bit', 'ELF'),
    # so the old comparison `platform.architecture() == '64bit'` was always
    # False and the 32-bit library was loaded unconditionally.
    is_64bit = platform.architecture()[0] == '64bit'
    if os.name == 'nt':
        libname = 'libcpa_x64.dll' if is_64bit else 'libcpa.dll'
    elif os.name == 'posix':
        libname = 'libcpa_x64.so' if is_64bit else 'libcpa.so'

    libname = os.path.join(dir, 'c_accel/%s' % libname)
    try:
        dll = CDLL(libname)
    except Exception:
        raise Exception("Could not import library file. Compile it for your platform first (there is a makefile for that): " + libname)
    self.osk = dll.oneSubkey
    # knownkey is filled in later by the attack model
    self.modelstate = {'knownkey':None}
def __init__(self, referenceAVD, androidSDKPath, androidTemporaryPath, androguardPath, typeOfDevice, deviceId):
    """Record SDK/tool paths and select the ARM emulator binary matching the
    host bitness; configure either a real device id or a reference AVD."""
    self.androidSDKPath = androidSDKPath
    self.androidTemporaryPath = androidTemporaryPath

    # Make the assumption that if platform is 64 bit, we have emulator64
    bits = platform.architecture()[0]
    if bits == '64bit':
        emulator = os.path.join(androidSDKPath, "tools/emulator64-arm")
    elif bits == '32bit':
        emulator = os.path.join(androidSDKPath, "tools/emulator-arm")
    else:
        raise Exception("Platform architecture is not recognized: {0}".format(platform.architecture()[0]))
    if not os.path.exists(emulator):
        raise Exception("File {0} doesn't exist".format(emulator))
    self.emulatorPath = emulator

    self.adbPath = os.path.join(androidSDKPath, "platform-tools/adb")
    self.androguardPath = androguardPath
    self.typeOfDevice = typeOfDevice

    # Differentiate real and emulated configurations
    if self.typeOfDevice == 'real':
        self.deviceId = deviceId
        self.referenceAVD = None
    else:
        self.referenceAVD = referenceAVD
        self.virtualDevicePath = os.path.dirname(referenceAVD)
def HostArch():
    """Returns the host architecture with a predictable string."""
    machine = platform.machine()

    # Normalize the kernel's machine name to the identifiers gyp recognizes.
    if re.match(r'i.86', machine) or machine == 'i86pc':
        arch = 'x86'
    elif machine in ('x86_64', 'amd64'):
        arch = 'x64'
    elif machine.startswith('aarch64'):
        arch = 'arm64'
    elif machine.startswith('arm'):
        arch = 'arm'
    elif machine.startswith('mips64'):
        arch = 'mips64'
    elif machine.startswith('mips'):
        arch = 'mips'
    elif machine.startswith('ppc'):
        arch = 'ppc'
    elif machine.startswith('s390'):
        arch = 's390'
    else:
        arch = machine

    # platform.machine is based on running kernel. It's possible to use 64-bit
    # kernel with 32-bit userland, e.g. to give linker slightly more memory.
    # Distinguish between different userland bitness by querying
    # the python binary.
    if platform.architecture()[0] == '32bit':
        if arch == 'x64':
            arch = 'x86'
        elif arch == 'arm64':
            arch = 'arm'
    return arch
def __init__(self):
    # Load the native CPA acceleration library (32- vs 64-bit variant) and
    # bind its oneSubkey entry point.
    self.clearStats()
    dir = os.path.dirname(__file__)
    #Determine correct library to load for 64-bit vs. 32-bit
    # NOTE(review): platform.architecture() returns a tuple such as
    # ('64bit', 'ELF'); comparing it to the string '64bit' is always False,
    # so the 32-bit library is always chosen. Probably meant
    # platform.architecture()[0] -- confirm.
    if os.name == 'nt':
        if platform.architecture() == '64bit':
            libname = 'libcpa_x64.dll'
        else:
            libname = 'libcpa.dll'
    elif os.name == 'posix':
        if platform.architecture() == '64bit':
            libname = 'libcpa_x64.so'
        else:
            libname = 'libcpa.so'
    try:
        dll = CDLL(os.path.join(dir, 'c_accel/%s'%libname))
    except IOError:
        #TODO: better error reporting
        print "error"
    self.osk = dll.oneSubkey
    # knownkey is filled in later by the attack model
    self.modelstate = {'knownkey':None}
def write_system_report(self):
    # Append a timestamped header plus locale/OS/architecture details to the
    # log file; fall back to printing to stdout if the file cannot be written.
    string = ''
    localtime = time.localtime()
    timeString = time.strftime("==========================%Y-%m-%d %H:%M:%S ", localtime)
    # UTC offset honoring DST, rendered as Z / +HH:MM / -HH:MM
    timezone = -(time.altzone if time.localtime().tm_isdst else time.timezone)
    timeString += "Z" if timezone == 0 else "+" if timezone > 0 else "-"
    timeString += time.strftime("%H:%M==========================", time.gmtime(abs(timezone)))
    string += timeString
    string += "\ndefault locale: " + locale.getdefaultlocale()[0] + ' ' \
        + locale.getdefaultlocale()[1]
    string += "\nhome directory: " + path.expanduser('~')
    string += "\nlog file path:\t" + self.fullPath
    string += "\nmachine:\t" + platform.machine()
    string += "\narchitecture:\t" + platform.architecture()[0] + ' ' \
        + platform.architecture()[1]
    string += "\nplatform:\t" + platform.platform()
    try:
        self.write_file('\n\n\n' + string + '\n')
    except:
        print '+++++ unable to write to file +++++'
        print string + '\n'
def perform_update(endpoint, current_version, app_name='', password=None, update_available_callback=None, progress_callback=None, update_complete_callback=None):
    """Query *endpoint* for a newer release; if one matches this platform,
    download, extract and launch the bootstrapper.

    Returns False when no applicable update exists; returns None after a user
    cancellation or a successfully prepared update.
    """
    session = create_requests_session(app_name=app_name, version=current_version)
    update_info = find_update(endpoint, requests_session=session)
    if not update_info:
        logger.debug("No update available")
        return False

    remote_version = float(update_info['current_version'])
    # downloads are keyed by "<System><bits>" e.g. "Windows64"
    arch_key = platform.system() + platform.architecture()[0][:2]
    if not float(remote_version) > float(current_version) or arch_key not in update_info['downloads']:
        logger.debug("No update for this architecture")
        return False

    description = update_info.get('description', None)
    update_url = update_info['downloads'][arch_key]
    logger.info("A new update is available. Version %s" % remote_version)
    donation()

    # update_available_callback should return a falsy value to stop the process
    if callable(update_available_callback) and not update_available_callback(version=remote_version, description=description):
        logger.info("User canceled update.")
        return

    workdir = tempfile.mkdtemp()
    archive_path = os.path.join(workdir, 'update.zip')
    extract_dir = os.path.join(workdir, 'update')
    archive = download_update(update_url, archive_path, requests_session=session, progress_callback=progress_callback)
    extracted = extract_update(archive, extract_dir, password=password)
    bootstrap = move_bootstrap(extracted)
    execute_bootstrap(bootstrap, extracted)
    logger.info("Update prepared for installation.")
    if callable(update_complete_callback):
        update_complete_callback()
def TestPlatform( ): print ("----------Operation System--------------------------") # 取得 python 版本 print '取得 python 版本 : ' + platform.python_version() # 取得操作系統可執行結構 : ex('64bit','WindowsPE') print "取得操作系統可執行結構 : ex('64bit','WindowsPE')" print platform.architecture() # 電腦目前網路群組名稱 print '電腦目前網路群組名稱' + platform.node() # 獲取操作系統名稱及版本號,‘Windows-7-6.1.7601-SP1’ print '獲取操作系統名稱及版本號 : ' + platform.platform() # 電腦處理器資訊,’Intel64 Family 6 Model 42 Stepping 7, GenuineIntel’ print '電腦處理器資訊 : ' + platform.processor() # 獲取操作系統中 Python 的構建日期 print "獲取操作系統中 Python 的構建日期" print platform.python_build() # 獲取系統中 python 解釋器的信息 print '獲取系統中 python 解釋器的信息 : ' + platform.python_compiler() if platform.python_branch()=="": print platform.python_implementation() print platform.python_revision() print "platform.release : " + platform.release() print "platform.system : " + platform.system() #print platform.system_alias() # 獲取操作系統版本 print '獲取操作系統版本 : ' + platform.version()
def copy_modules(modules, src_root='src'):
    """
    Copies native modules into src/

    The routine implements build_update command functionality
    and executed after "setup.py build" command.
    """
    import string, platform, shutil
    # 'X.Y' of the running interpreter (string.split is the Python 2 idiom)
    version = (string.split(sys.version)[0])[0:3]
    machine = platform.machine()
    # distutils build directory naming: build/lib.<platform>-<version>
    if os.name == 'posix':
        prefix = 'build/lib.linux-' + machine + '-' + version
        ext = '.so'
    elif os.name == 'nt' and platform.architecture()[0] == '32bit':
        prefix = 'build/lib.win32-' + version
        ext = '.pyd'
    elif os.name == 'nt' and platform.architecture()[0] == '64bit':
        prefix = 'build/lib.win-amd64-' + version
        ext = '.pyd'
    for item in modules:
        # dotted module name -> relative file path with native extension
        path = os.path.join(*item.name.split('.')) + ext
        src = os.path.join(prefix, path)
        dst = os.path.join(src_root, path)
        shutil.copy(src, dst)
        print '>>>Module %s has been copied to src/ directory' % path
def get_thirdparty_binary(dir_name, fatal=True):
    """Return the path of the bundled third-party binary *dir_name* for this
    platform, or None when missing and *fatal* is False.

    Raises Exception for unsupported platforms/architectures, or when the
    binary is missing and *fatal* is True.
    """
    suffix = ''
    if sys.platform == 'win32':
        platform_dir = 'win'
        suffix = '.exe'
    elif sys.platform.startswith('linux'):
        # BUGFIX: Python 3 reports sys.platform == 'linux' (not 'linux2'),
        # which previously fell through to 'Unsupported platform'.
        if platform.architecture()[0] == '64bit':
            platform_dir = 'lin64'
        elif platform.architecture()[0] == '32bit':
            platform_dir = 'lin32'
        else:
            raise Exception('Unsupported architecture')
    else:
        raise Exception('Unsupported platform')
    binary_path = os.path.normpath(
        os.path.join(
            thisdir, os.pardir, 'third_party', dir_name, platform_dir,
            dir_name + suffix))
    if not os.path.isfile(binary_path):
        if fatal:
            raise Exception('No binary available for %r '
                            '(looked at %r)' % (dir_name, binary_path))
        else:
            return None
    return binary_path
def setUp(self):
    # Open an X DevAPI client/session from the test configuration and record
    # platform metadata (architecture, OS version, client license) used by
    # the connection-attribute tests.
    self.connect_kwargs = tests.get_mysqlx_config()
    self.schema_name = self.connect_kwargs["schema"]
    try:
        self.client = mysqlx.get_client(self.connect_kwargs, "{}")
        self.session = self.client.get_session()
    except mysqlx.Error as err:
        self.fail("{0}".format(err))
    if os.name == "nt":
        # Windows: derive the arch label from the interpreter's bitness
        if "64" in platform.architecture()[0]:
            self.platform_arch = "x86_64"
        elif "32" in platform.architecture()[0]:
            self.platform_arch = "i386"
        else:
            self.platform_arch = platform.architecture()
        self.os_ver = "Windows-{}".format(platform.win32_ver()[1])
    else:
        self.platform_arch = platform.machine()
        if platform.system() == "Darwin":
            self.os_ver = "{}-{}".format("macOS", platform.mac_ver()[0])
        else:
            # NOTE(review): platform.linux_distribution() was removed in
            # Python 3.8 -- confirm the supported interpreter versions.
            self.os_ver = "-".join(platform.linux_distribution()[0:2])
    # LICENSE is a module-level constant, e.g. "GPLv2 ..." or commercial text
    license_chunks = LICENSE.split(" ")
    if license_chunks[0] == "GPLv2":
        self.client_license = "GPL-2.0"
    else:
        self.client_license = "Commercial"
def run(self):
    # Worker loop: optionally regenerate the Overviewer config file, then
    # launch Minecraft Overviewer and stream its stdout into the UI until
    # the global stop flag is set.
    global go
    global sel
    global fileo
    global stop
    global genoutput
    global sel
    while not stop:
        if genoutput:
            # write the freshly generated config to the user-selected path
            output = self.genOut()
            f = open(str(sel.filep).replace("\\","/"),"w")
            f.write(output)
            f.close()
            genoutput = False
        if go:
            # NOTE(review): platform.architecture() returns a tuple whose
            # first element is '32bit'/'64bit' (lowercase b); comparing the
            # tuple to "32Bit"/"64Bit" is always False, so on Windows no
            # process is ever started. Confirm the intended check.
            if platform.system() == "Windows" and platform.architecture() == "32Bit":
                proc = subprocess.Popen('32bit\\overviewer.exe --config="'+str(fileo)+'"', shell=True, stdout=subprocess.PIPE)
            if platform.system() == "Windows" and platform.architecture() == "64Bit":
                proc = subprocess.Popen('64bit\\overviewer.exe --config="'+str(fileo)+'"', shell=True, stdout=subprocess.PIPE)
            if platform.system() == "Linux":
                proc = subprocess.Popen('overviewer.py --config="'+str(fileo)+'"', shell=True, stdout=subprocess.PIPE)
            # relay non-empty output lines until EOF or stop is requested
            while not stop:
                line = proc.stdout.readline()
                if line.strip() == "":
                    pass
                else:
                    print line.strip()
                    sel.output.append(line.strip())
                if not line:
                    break
            if stop:
                break
            proc.wait()
            go = False
def __init__(self, **kwargs):
    """Detect host platform name, arch family and bitness, honoring
    test-data overrides when enabled."""
    super(PlatformApi, self).__init__(**kwargs)
    self._name = norm_plat(sys.platform)
    self._arch = 'intel'
    self._bits = norm_bits(platform.machine())

    if self._test_data.enabled:
        # Default to linux/64, unless test case says otherwise.
        self._name = norm_plat(self._test_data.get('name', 'linux'))
        self._bits = norm_bits(self._test_data.get('bits', 64))
    else:  # pragma: no cover
        interpreter_bits = platform.architecture()[0]
        # platform.machine reflects the running kernel; the userland (this
        # python binary) may differ in bitness, e.g. a 32-bit userland on a
        # 64-bit kernel, so trust the interpreter's own bitness.
        if self._name == 'linux' and self._bits == 64 and interpreter_bits == '32bit':
            self._bits = 32
        # On Mac the inverse holds: a 32-bit kernel can still run 64-bit
        # userspace programs.
        elif self._name == 'mac' and self._bits == 32 and interpreter_bits == '64bit':
            self._bits = 64
def get_repo(self):
    # Refresh package lists: download the PACKAGES.TXT indexes for the
    # official Slackware tree (plus extra/pasture/patches/testing) and for
    # several third-party repositories into repo/.
    if not os.path.isdir('repo/'):
        os.makedirs('repo/')
    # arch label used by the alien/slacky mirror URL layouts
    if platform.architecture()[0] == '32bit':
        arch = 'x86'
    else:
        arch = 'x86_64'
    # scan /etc/slackpkg/mirrors; lines without '#' are active mirrors
    # (the LAST uncommented mirror wins; trailing "/\n" is stripped)
    slackrepo = None
    f = open('/etc/slackpkg/mirrors', 'r')
    for line in f:
        x = line.find('#')
        if x == -1:
            slackrepo = line[:-2]
    f.close()
    # notify the GTK main loop that downloading has started
    gobject.idle_add(printInThread, 'Download pkg')
    os.popen('wget -q '+slackrepo+'/PACKAGES.TXT -O repo/slackware.txt')
    os.popen('wget -q ' + slackrepo + '/extra/PACKAGES.TXT -O repo/slackware_extra.txt')
    os.popen('wget -q ' + slackrepo + '/pasture/PACKAGES.TXT -O repo/slackware_pasture.txt')
    os.popen('wget -q ' + slackrepo + '/patches/PACKAGES.TXT -O repo/slackware_patches.txt')
    os.popen('wget -q ' + slackrepo + '/testing/PACKAGES.TXT -O repo/slackware_testing.txt')
    os.popen('wget -q http://bear.alienbase.nl/mirrors/people/alien/sbrepos/'+platform.dist()[1]+'/'+arch+'/PACKAGES.TXT -O repo/alien.txt')
    # os.popen('wget -q http://www.slackware.com/~alien/slackbuilds/PACKAGES.TXT -O repo/slackbuild_alien.txt') # non-official repo
    os.popen('wget -q http://bear.alienbase.nl/mirrors/people/alien/restricted_sbrepos/'+platform.dist()[1]+'/'+arch+'/PACKAGES.TXT -O repo/alien_restricted.txt')
    # slacky uses separate slackware-/slackware64- trees per bitness
    if platform.architecture()[0] == '32bit':
        os.popen('wget -q http://repository.slacky.eu/slackware-'+platform.dist()[1]+'/PACKAGES.TXT -O repo/slacky.txt')
    else:
        os.popen('wget -q http://repository.slacky.eu/slackware64-'+platform.dist()[1]+'/PACKAGES.TXT -O repo/slacky.txt')
    os.popen('wget -q http://slakfinder.org/slackpkg+/PACKAGES.TXT -O repo/slackpkg_plus.txt')
def __init__(self):
    """Select default CMake generators for Windows based on the running
    Python's version and bitness."""
    super(WindowsPlatform, self).__init__()
    self.default_generators = ["MinGW Makefiles", ]
    python_major_version = sys.version_info[0]
    # python 3 from the mothership uses VS 2010
    if python_major_version == 3:
        vs_base = "Visual Studio 10"
    # python 2 from the mothership uses VS 2008
    elif python_major_version == 2:
        vs_base = "Visual Studio 9 2008"
    else:
        raise RuntimeError("Only Python 2 and 3 are supported - "
                           "please add support in platform_specific "
                           "PyCMake folder")

    # BUGFIX: platform.architecture() returns a tuple such as
    # ('64bit', 'WindowsPE') and never the bare strings "x64"/"ARM", so the
    # previous comparisons could never select a Win64/ARM generator.
    # Python is Win64, build a Win64 module
    if platform.architecture()[0] == "64bit":
        vs_base += " Win64"
    # only VS 11 and above support ARM, but include it here in hopes of
    # making future work easier.
    elif platform.machine().upper().startswith("ARM"):
        vs_base += " ARM"
    # we're implicitly doing nothing for 32-bit builds. Their generator
    # string IDs seem to be just the vs_base.
    self.default_generators.append(vs_base)
def finalize_options(self):
    """Overloaded build_ext implementation to append custom openssl
    include file and library linking options"""
    build_ext.build_ext.finalize_options(self)
    self.add_multiarch_paths()

    opensslIncludeDir = os.path.join(self.openssl, "include")
    opensslLibraryDir = os.path.join(self.openssl, "lib")

    self.swig_opts = [
        "-I%s" % i
        for i in self.include_dirs + [opensslIncludeDir, os.path.join(opensslIncludeDir, "openssl")]
    ]
    self.swig_opts.append("-includeall")
    self.swig_opts.append("-modern")

    # Fedora does hat tricks.
    # BUGFIX: platform.linux_distribution() was removed in Python 3.8; fall
    # back to an empty name there so the RPM-specific defines are skipped.
    distro_name = getattr(platform, "linux_distribution", lambda: ("", "", ""))()[0]
    if distro_name in ["Fedora", "CentOS", "Red Hat Enterprise Linux Server"]:
        if platform.architecture()[0] == "64bit":
            self.swig_opts.append("-D__x86_64__")
        elif platform.architecture()[0] == "32bit":
            self.swig_opts.append("-D__i386__")

    self.include_dirs += [os.path.join(self.openssl, opensslIncludeDir), os.path.join(os.getcwd(), "SWIG")]

    if sys.platform == "cygwin":
        # Cygwin SHOULD work (there's code in distutils), but
        # if one first starts a Windows command prompt, then bash,
        # the distutils code does not seem to work. If you start
        # Cygwin directly, then it would work even without this change.
        # Someday distutils will be fixed and this won't be needed.
        self.library_dirs += [os.path.join(self.openssl, "bin")]

    self.library_dirs += [os.path.join(self.openssl, opensslLibraryDir)]
def DetectarSistema():
    """Detect the host operating system and CPU architecture.

    Returns:
        tuple: ``(OS, arch)`` where ``OS`` is ``"windows"`` (os.name == 'nt')
        or ``"linux"`` (any POSIX system), and ``arch`` is ``"x64"``,
        ``"x86"``, or the raw machine string for ARM/MIPS boards.
    """
    import platform
    import os
    machine = platform.machine()
    bits = platform.architecture()[0]
    # ARM / MIPS boards: report the raw uname machine string.
    # BUG FIX: the original list contained "armv71", a typo for the real
    # uname string "armv7l"; both are accepted for backward compatibility.
    if machine in ["armv7l", "armv71", "mips", "mipsel"]:
        arch = machine
    # PC (the original had identical i686/AMD64 and fallback branches,
    # so they are collapsed into a single bitness check).
    elif bits == "64bit":
        arch = "x64"
    elif bits == "32bit":
        arch = "x86"
    else:
        # ROBUSTNESS: the original left ``arch`` unbound here and raised
        # UnboundLocalError; assume 32-bit as a conservative fallback.
        arch = "x86"
    # Windows
    if os.name == "nt":
        OS = "windows"
    # Linux / other POSIX
    elif os.name == "posix":
        OS = "linux"
    else:
        # ROBUSTNESS: the original raised UnboundLocalError for any other
        # os.name; default to "linux" so callers always get a value.
        OS = "linux"
    return OS, arch
# SqBuild bootstrap (SqCoreWeb): prints interpreter info, normalizes the
# working directory to the web project folder, then decides whether npm
# packages must be (re)installed by comparing package.json's mtime against a
# stamp file written by the previous install.
# NOTE(review): this script has import-time side effects (os.chdir,
# os.system) and appears to continue past this excerpt.
import os
import subprocess
import platform
import sys
from pathlib import Path
import fileinput
import shutil

print("SqBuild: Python ver: " + platform.python_version() + " (" + platform.architecture()[0] + "), CWD:'" + os.getcwd() + "'")
if ( os.getcwd().endswith("SqCore") ):
    # VsCode's context menu 'Run Python file in Terminal' runs it from the workspace folder. VsCode F5 runs it from the project folder. We change it to the project folder
    os.chdir(os.getcwd() + "/src/WebServer/SqCoreWeb")

# 1. Basic checks: Ensure Node.js is installed. If node_modules folder is empty or package.json changed since last nodeTouchFile, it should restore Npm packages.
nodeTouchFile = os.getcwd() + "/node_modules/.install-stamp"
nodeJsInstallNeeded = False
if os.path.isfile(nodeTouchFile):
    print("SqBuild: /node_modules/ exist")
    # Reinstall only when package.json is newer than the stamp file.
    nodeTouchModificationTimestamp = os.path.getmtime(nodeTouchFile)
    packageJsonModificationTimestamp = os.path.getmtime(os.getcwd() + "/package.json")
    if packageJsonModificationTimestamp > nodeTouchModificationTimestamp:
        nodeJsInstallNeeded = True
else:
    nodeJsInstallNeeded = True
if nodeJsInstallNeeded:
    nodeRetCode = os.system( "node --version")
    # don't want to run 'node --version' all the times.
def py_version(self):
    """Return the interpreter's version string and its bitness.

    Returns:
        tuple: ``(version, bits)``, e.g. ``('3.8.2', '64bit')``.
    """
    version = platform.python_version()
    bits = platform.architecture()[0]
    return version, bits
# run_tests: MicroPython test-harness driver.  Probes the target for optional
# language/runtime features and builds a skip-set, then runs each test and
# compares MicroPython's output against CPython's (or a .exp file).
# NOTE(review): relies on module-level helpers defined elsewhere in this
# file: run_feature_check, run_micropython, base_path, CPYTHON3_CMD,
# ThreadSafeCounter, ThreadPool, rm_f.
def run_tests(pyb, tests, args, result_dir, num_threads=1):
    """Run *tests* against the target *pyb* (None = local unix port).

    Writes mismatching outputs to *result_dir* and returns True when all
    selected tests pass (or when only listing tests), False otherwise.
    """
    test_count = ThreadSafeCounter()
    testcase_count = ThreadSafeCounter()
    passed_count = ThreadSafeCounter()
    failed_tests = ThreadSafeCounter([])
    skipped_tests = ThreadSafeCounter([])

    skip_tests = set()
    skip_native = False
    skip_int_big = False
    skip_bytearray = False
    skip_set_type = False
    skip_slice = False
    skip_async = False
    skip_const = False
    skip_revops = False
    skip_io_module = False
    skip_fstring = False
    skip_endian = False
    has_complex = True
    has_coverage = False
    upy_float_precision = 32

    # If we're asked to --list-tests, we can't assume that there's a
    # connection to target, so we can't run feature checks usefully.
    if not (args.list_tests or args.write_exp):
        # Even if we run completely different tests in a different directory,
        # we need to access feature_checks from the same directory as the
        # run-tests.py script itself so use base_path.

        # Check if micropython.native is supported, and skip such tests if it's not
        output = run_feature_check(pyb, args, base_path, "native_check.py")
        if output != b"native\n":
            skip_native = True

        # Check if arbitrary-precision integers are supported, and skip such tests if it's not
        output = run_feature_check(pyb, args, base_path, "int_big.py")
        if output != b"1000000000000000000000000000000000000000000000\n":
            skip_int_big = True

        # Check if bytearray is supported, and skip such tests if it's not
        output = run_feature_check(pyb, args, base_path, "bytearray.py")
        if output != b"bytearray\n":
            skip_bytearray = True

        # Check if set type (and set literals) is supported, and skip such tests if it's not
        output = run_feature_check(pyb, args, base_path, "set_check.py")
        if output != b"{1}\n":
            skip_set_type = True

        # Check if slice is supported, and skip such tests if it's not
        output = run_feature_check(pyb, args, base_path, "slice.py")
        if output != b"slice\n":
            skip_slice = True

        # Check if async/await keywords are supported, and skip such tests if it's not
        output = run_feature_check(pyb, args, base_path, "async_check.py")
        if output != b"async\n":
            skip_async = True

        # Check if const keyword (MicroPython extension) is supported, and skip such tests if it's not
        output = run_feature_check(pyb, args, base_path, "const.py")
        if output != b"1\n":
            skip_const = True

        # Check if __rOP__ special methods are supported, and skip such tests if it's not
        output = run_feature_check(pyb, args, base_path, "reverse_ops.py")
        if output == b"TypeError\n":
            skip_revops = True

        # Check if uio module exists, and skip such tests if it doesn't
        output = run_feature_check(pyb, args, base_path, "uio_module.py")
        if output != b"uio\n":
            skip_io_module = True

        # Check if fstring feature is enabled, and skip such tests if it doesn't
        output = run_feature_check(pyb, args, base_path, "fstring.py")
        if output != b"a=1\n":
            skip_fstring = True

        # Check if emacs repl is supported, and skip such tests if it's not
        t = run_feature_check(pyb, args, base_path, "repl_emacs_check.py")
        if "True" not in str(t, "ascii"):
            skip_tests.add("cmdline/repl_emacs_keys.py")

        # Check if words movement in repl is supported, and skip such tests if it's not
        t = run_feature_check(pyb, args, base_path, "repl_words_move_check.py")
        if "True" not in str(t, "ascii"):
            skip_tests.add("cmdline/repl_words_move.py")

        upy_byteorder = run_feature_check(pyb, args, base_path, "byteorder.py")
        upy_float_precision = run_feature_check(pyb, args, base_path, "float.py")
        try:
            upy_float_precision = int(upy_float_precision)
        except ValueError:
            upy_float_precision = 0
        has_complex = run_feature_check(pyb, args, base_path, "complex.py") == b"complex\n"
        has_coverage = run_feature_check(pyb, args, base_path, "coverage.py") == b"coverage\n"
        cpy_byteorder = subprocess.check_output(
            CPYTHON3_CMD + [base_path("feature_check/byteorder.py")]
        )
        skip_endian = upy_byteorder != cpy_byteorder

    # These tests don't test slice explicitly but rather use it to perform the test
    misc_slice_tests = (
        "builtin_range",
        "class_super",
        "containment",
        "errno1",
        "fun_str",
        "generator1",
        "globals_del",
        "memoryview1",
        "memoryview_gc",
        "object1",
        "python34",
        "struct_endian",
    )

    # Some tests shouldn't be run on GitHub Actions
    if os.getenv("GITHUB_ACTIONS") == "true":
        skip_tests.add("thread/stress_schedule.py")  # has reliability issues

    if upy_float_precision == 0:
        skip_tests.add("extmod/uctypes_le_float.py")
        skip_tests.add("extmod/uctypes_native_float.py")
        skip_tests.add("extmod/uctypes_sizeof_float.py")
        skip_tests.add("extmod/ujson_dumps_float.py")
        skip_tests.add("extmod/ujson_loads_float.py")
        skip_tests.add("extmod/urandom_extra_float.py")
        skip_tests.add("misc/rge_sm.py")
    if upy_float_precision < 32:
        skip_tests.add(
            "float/float2int_intbig.py"
        )  # requires fp32, there's float2int_fp30_intbig.py instead
        skip_tests.add(
            "float/string_format.py"
        )  # requires fp32, there's string_format_fp30.py instead
        skip_tests.add("float/bytes_construct.py")  # requires fp32
        skip_tests.add("float/bytearray_construct.py")  # requires fp32
    if upy_float_precision < 64:
        skip_tests.add("float/float_divmod.py")  # tested by float/float_divmod_relaxed.py instead
        skip_tests.add("float/float2int_doubleprec_intbig.py")
        skip_tests.add("float/float_format_ints_doubleprec.py")
        skip_tests.add("float/float_parse_doubleprec.py")

    if not has_complex:
        skip_tests.add("float/complex1.py")
        skip_tests.add("float/complex1_intbig.py")
        skip_tests.add("float/complex_special_methods.py")
        skip_tests.add("float/int_big_float.py")
        skip_tests.add("float/true_value.py")
        skip_tests.add("float/types.py")
        skip_tests.add("float/complex_dunder.py")

    if not has_coverage:
        skip_tests.add("cmdline/cmd_parsetree.py")
        skip_tests.add("cmdline/repl_sys_ps1_ps2.py")

    # Some tests shouldn't be run on a PC
    if args.target == "unix":
        # unix build does not have the GIL so can't run thread mutation tests
        for t in tests:
            if t.startswith("thread/mutate_"):
                skip_tests.add(t)

    # Some tests shouldn't be run on pyboard
    if args.target != "unix":
        skip_tests.add("basics/exception_chain.py")  # warning is not printed
        skip_tests.add("micropython/meminfo.py")  # output is very different to PC output
        skip_tests.add("extmod/machine_mem.py")  # raw memory access not supported

        if args.target == "wipy":
            skip_tests.add("misc/print_exception.py")  # requires error reporting full
            skip_tests.update(
                {
                    "extmod/uctypes_%s.py" % t
                    for t in "bytearray le native_le ptr_le ptr_native_le sizeof sizeof_native array_assign_le array_assign_native_le".split()
                }
            )  # requires uctypes
            skip_tests.add("extmod/zlibd_decompress.py")  # requires zlib
            skip_tests.add("extmod/uheapq1.py")  # uheapq not supported by WiPy
            skip_tests.add("extmod/urandom_basic.py")  # requires urandom
            skip_tests.add("extmod/urandom_extra.py")  # requires urandom
        elif args.target == "esp8266":
            skip_tests.add("misc/rge_sm.py")  # too large
        elif args.target == "minimal":
            skip_tests.add("basics/class_inplace_op.py")  # all special methods not supported
            skip_tests.add(
                "basics/subclass_native_init.py"
            )  # native subclassing corner cases not support
            skip_tests.add("misc/rge_sm.py")  # too large
            skip_tests.add("micropython/opt_level.py")  # don't assume line numbers are stored
        elif args.target == "nrf":
            skip_tests.add("basics/memoryview1.py")  # no item assignment for memoryview
            skip_tests.add("extmod/urandom_basic.py")  # unimplemented: urandom.seed
            skip_tests.add("micropython/opt_level.py")  # no support for line numbers
            skip_tests.add("misc/non_compliant.py")  # no item assignment for bytearray
            for t in tests:
                if t.startswith("basics/io_"):
                    skip_tests.add(t)
        elif args.target == "renesas-ra":
            skip_tests.add(
                "extmod/utime_time_ns.py"
            )  # RA fsp rtc function doesn't support nano sec info
        elif args.target == "qemu-arm":
            skip_tests.add("misc/print_exception.py")  # requires sys stdfiles

    # Some tests are known to fail on 64-bit machines
    if pyb is None and platform.architecture()[0] == "64bit":
        pass

    # Some tests use unsupported features on Windows
    if os.name == "nt":
        skip_tests.add("import/import_file.py")  # works but CPython prints forward slashes

    # Some tests are known to fail with native emitter
    # Remove them from the below when they work
    if args.emit == "native":
        skip_tests.update(
            {"basics/%s.py" % t for t in "gen_yield_from_close generator_name".split()}
        )  # require raise_varargs, generator name
        skip_tests.update(
            {"basics/async_%s.py" % t for t in "with with2 with_break with_return".split()}
        )  # require async_with
        skip_tests.update(
            {"basics/%s.py" % t for t in "try_reraise try_reraise2".split()}
        )  # require raise_varargs
        skip_tests.add("basics/annotate_var.py")  # requires checking for unbound local
        skip_tests.add("basics/del_deref.py")  # requires checking for unbound local
        skip_tests.add("basics/del_local.py")  # requires checking for unbound local
        skip_tests.add("basics/exception_chain.py")  # raise from is not supported
        skip_tests.add("basics/fun_name.py")  # requires proper names for native functions
        skip_tests.add("basics/scope_implicit.py")  # requires checking for unbound local
        skip_tests.add("basics/sys_tracebacklimit.py")  # requires traceback info
        skip_tests.add("basics/try_finally_return2.py")  # requires raise_varargs
        skip_tests.add("basics/unboundlocal.py")  # requires checking for unbound local
        skip_tests.add("extmod/uasyncio_event.py")  # unknown issue
        skip_tests.add("extmod/uasyncio_lock.py")  # requires async with
        skip_tests.add("extmod/uasyncio_micropython.py")  # unknown issue
        skip_tests.add("extmod/uasyncio_wait_for.py")  # unknown issue
        skip_tests.add("misc/features.py")  # requires raise_varargs
        skip_tests.add(
            "misc/print_exception.py"
        )  # because native doesn't have proper traceback info
        skip_tests.add("misc/sys_exc_info.py")  # sys.exc_info() is not supported for native
        skip_tests.add("misc/sys_settrace_features.py")  # sys.settrace() not supported
        skip_tests.add("misc/sys_settrace_generator.py")  # sys.settrace() not supported
        skip_tests.add("misc/sys_settrace_loop.py")  # sys.settrace() not supported
        skip_tests.add(
            "micropython/emg_exc.py"
        )  # because native doesn't have proper traceback info
        skip_tests.add(
            "micropython/heapalloc_traceback.py"
        )  # because native doesn't have proper traceback info
        skip_tests.add(
            "micropython/opt_level_lineno.py"
        )  # native doesn't have proper traceback info
        skip_tests.add("micropython/schedule.py")  # native code doesn't check pending events
        skip_tests.add("stress/bytecode_limit.py")  # bytecode specific test

    def run_one_test(test_file):
        # Run a single test file and record pass/fail/skip in the counters.
        test_file = test_file.replace("\\", "/")

        if args.filters:
            # Default verdict is the opposite of the first action
            verdict = "include" if args.filters[0][0] == "exclude" else "exclude"
            for action, pat in args.filters:
                if pat.search(test_file):
                    verdict = action
            if verdict == "exclude":
                return

        test_basename = test_file.replace("..", "_").replace("./", "").replace("/", "_")
        test_name = os.path.splitext(os.path.basename(test_file))[0]
        is_native = (
            test_name.startswith("native_")
            or test_name.startswith("viper_")
            or args.emit == "native"
        )
        is_endian = test_name.endswith("_endian")
        is_int_big = test_name.startswith("int_big") or test_name.endswith("_intbig")
        is_bytearray = test_name.startswith("bytearray") or test_name.endswith("_bytearray")
        is_set_type = test_name.startswith(("set_", "frozenset")) or test_name.endswith("_set")
        is_slice = test_name.find("slice") != -1 or test_name in misc_slice_tests
        is_async = test_name.startswith(("async_", "uasyncio_"))
        is_const = test_name.startswith("const")
        is_io_module = test_name.startswith("io_")
        is_fstring = test_name.startswith("string_fstring")

        skip_it = test_file in skip_tests
        skip_it |= skip_native and is_native
        skip_it |= skip_endian and is_endian
        skip_it |= skip_int_big and is_int_big
        skip_it |= skip_bytearray and is_bytearray
        skip_it |= skip_set_type and is_set_type
        skip_it |= skip_slice and is_slice
        skip_it |= skip_async and is_async
        skip_it |= skip_const and is_const
        skip_it |= skip_revops and "reverse_op" in test_name
        skip_it |= skip_io_module and is_io_module
        skip_it |= skip_fstring and is_fstring

        if args.list_tests:
            if not skip_it:
                print(test_file)
            return

        if skip_it:
            print("skip ", test_file)
            skipped_tests.append(test_name)
            return

        # get expected output
        test_file_expected = test_file + ".exp"
        if os.path.isfile(test_file_expected):
            # expected output given by a file, so read that in
            with open(test_file_expected, "rb") as f:
                output_expected = f.read()
        else:
            # run CPython to work out expected output
            try:
                output_expected = subprocess.check_output(CPYTHON3_CMD + [test_file])
                if args.write_exp:
                    with open(test_file_expected, "wb") as f:
                        f.write(output_expected)
            except subprocess.CalledProcessError:
                output_expected = b"CPYTHON3 CRASH"

        # canonical form for all host platforms is to use \n for end-of-line
        output_expected = output_expected.replace(b"\r\n", b"\n")

        if args.write_exp:
            return

        # run MicroPython
        output_mupy = run_micropython(pyb, args, test_file)

        if output_mupy == b"SKIP\n":
            print("skip ", test_file)
            skipped_tests.append(test_name)
            return

        testcase_count.add(len(output_expected.splitlines()))

        filename_expected = os.path.join(result_dir, test_basename + ".exp")
        filename_mupy = os.path.join(result_dir, test_basename + ".out")

        if output_expected == output_mupy:
            print("pass ", test_file)
            passed_count.increment()
            rm_f(filename_expected)
            rm_f(filename_mupy)
        else:
            with open(filename_expected, "wb") as f:
                f.write(output_expected)
            with open(filename_mupy, "wb") as f:
                f.write(output_mupy)
            print("FAIL ", test_file)
            failed_tests.append(test_name)

        test_count.increment()

    # Targets and listing mode are inherently serial.
    if pyb or args.list_tests:
        num_threads = 1

    if num_threads > 1:
        pool = ThreadPool(num_threads)
        pool.map(run_one_test, tests)
    else:
        for test in tests:
            run_one_test(test)

    if args.list_tests:
        return True

    print(
        "{} tests performed ({} individual testcases)".format(
            test_count.value, testcase_count.value
        )
    )
    print("{} tests passed".format(passed_count.value))

    skipped_tests = sorted(skipped_tests.value)
    if len(skipped_tests) > 0:
        print("{} tests skipped: {}".format(len(skipped_tests), " ".join(skipped_tests)))
    failed_tests = sorted(failed_tests.value)
    if len(failed_tests) > 0:
        print("{} tests failed: {}".format(len(failed_tests), " ".join(failed_tests)))
        return False

    # all tests succeeded
    return True
def __init__(self):
    """Compose the error message from the host's bitness and OS name."""
    bits = platform.architecture()[0]
    system = platform.system()
    message = 'System configuration is unsupported: ' + bits + ' ' + system
    super(UnsupportedConfigurationError, self).__init__(message)
# Setup-script preamble: verifies internet connectivity, then classifies the
# OS and the interpreter as 32- or 64-bit, tallying "votes" in t2 (32-bit)
# and s4 (64-bit) for use later in the setup flow.
# NOTE(review): this module exits the process at import time when offline.
def is_connected():
    """Return True when a TCP connection to www.google.com:80 succeeds."""
    try:
        # NOTE(review): the connection object is never closed -- harmless for
        # a one-shot setup script, but worth confirming.
        socket.create_connection(("www.google.com", 80))
        return True
    except OSError:
        pass
    return False

if is_connected() == False:
    print('Try running the setup file again when you have an internet connection')
    sys.exit('No Network Connection Found')

t2=0
s4=0

# platform.architecture() returns ('32bit'|'64bit', linkage); the `in`
# checks below test tuple membership against its first element.
if '32bit' in platform.architecture():
    t2+=1
    print('32-bit operating system detected.../')
elif '64bit' in platform.architecture():
    s4+=1
    print('64-bit operating system detected.../')
else :
    # NOTE(review): "Architectre" is a typo in a user-facing message.
    print('Failed to detect Operating System Architectre')

# sys.maxsize distinguishes 32-bit (2**31-1) from 64-bit (2**63-1) builds.
if str(sys.maxsize) == str(2147483647):
    t2+=1
    print('32bit python detected.../')
elif str(sys.maxsize) == str(9223372036854775807):
    s4+=1
    print('64bit python detected.../')
else :
    print('Failed to detect Python Architecture')
# NOTE(review): this excerpt begins mid-method (the enclosing def lies
# outside this chunk); the fragment syncs the back/forward navigation button
# states with the embedded browser and re-schedules itself every 100 ms.
                self.back_state = tk.DISABLED
        if browser.CanGoForward():
            if self.forward_state != tk.NORMAL:
                self.forward_button.config(state=tk.NORMAL)
                self.forward_state = tk.NORMAL
        else:
            if self.forward_state != tk.DISABLED:
                self.forward_button.config(state=tk.DISABLED)
                self.forward_state = tk.DISABLED
        self.after(100, self.update_state)


# Entry point: configure logging, log CEF/Python/Tk versions, then create
# the Tk root BEFORE initializing CEF (required -- see Issue #306), run the
# main loop, and shut down all CEF processes on exit.
if __name__ == '__main__':
    logger.setLevel(_logging.INFO)
    stream_handler = _logging.StreamHandler()
    formatter = _logging.Formatter("[%(filename)s] %(message)s")
    stream_handler.setFormatter(formatter)
    logger.addHandler(stream_handler)
    logger.info("CEF Python {ver}".format(ver=cef.__version__))
    logger.info("Python {ver} {arch}".format(ver=platform.python_version(), arch=platform.architecture()[0]))
    logger.info("Tk {ver}".format(ver=tk.Tcl().eval('info patchlevel')))
    # NOTE(review): string comparison of versions ("55.3" vs "100.0") is
    # lexicographic and can misfire for multi-digit majors -- confirm.
    assert cef.__version__ >= "55.3", "CEF Python v55.3+ required to run this"
    sys.excepthook = cef.ExceptHook  # To shutdown all CEF processes on error
    root = tk.Tk()
    app = MainFrame(root)
    # Tk must be initialized before CEF otherwise fatal error (Issue #306)
    cef.Initialize()
    app.mainloop()
    cef.Shutdown()
def __init__(self):
    """Compose the error message from the host's bitness and OS name."""
    bits = platform.architecture()[0]
    system = platform.system()
    message = 'Platform is unsupported: ' + bits + ' ' + system
    super(UnsupportedPlatformError, self).__init__(message)
def is_win64():
    """Return True when running a 64-bit Python interpreter on Windows."""
    if sys.platform != "win32":
        return False
    return platform.architecture()[0] == "64bit"
# Build-environment probe: detect the host architecture and compute the
# toolkit's root/src/bin/lib directories relative to this script.
import os
import os.path
import platform
from shutil import copyfile

TOOLS_PREFIX = ''

OS_NAME = platform.system()
MACH = platform.machine()
ARCH = platform.architecture()
is32bit = (ARCH[0] == '32bit')

# On 32-bit hosts, map the machine string onto a target architecture:
# classic Intel names mean x86, anything else is assumed to be ARM.
if not is32bit:
    TARGET_ARCH = ''
elif MACH in ('i686', 'i386'):
    TARGET_ARCH = 'x86'
else:
    TARGET_ARCH = 'arm'

print('MACH=' + MACH + ' ARCH=' + str(ARCH) + ' TARGET_ARCH=' + TARGET_ARCH)


def joinPath(root, subdir):
    """Join *subdir* onto *root* and normalize the resulting path."""
    return os.path.normpath(os.path.join(root, subdir))


# Everything is laid out relative to the directory containing this script.
TK_ROOT = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
print('TK_ROOT: ' + TK_ROOT)

TK_SRC = joinPath(TK_ROOT, 'src')
TK_BIN_DIR = joinPath(TK_ROOT, 'bin')
TK_LIB_DIR = joinPath(TK_ROOT, 'lib')
# Packaging prelude for the "molecular" Blender addon: detects the host OS,
# derives the Python ABI tag used to name the built binary, and switches into
# the sources directory before building.
# NOTE(review): import-time side effects (chdir) and a project-local import
# (molecular); the script appears to continue past this excerpt.
from molecular import bl_info
from zipfile import ZipFile, ZIP_DEFLATED
from os import path, walk, remove, chdir, getcwd
import shutil
import platform
import sys
import pathlib
from subprocess import Popen, PIPE

# platform.architecture()[1] is the executable's linkage format (ELF on
# Linux, WindowsPE on Windows); the platform.system() check is a fallback.
is_linux = platform.architecture()[1] == "ELF" or platform.system() == "Linux"
is_windows = platform.architecture()[1] == "WindowsPE" or platform.system() == "Windows"

#in python 3.8.x, sys.abiflags attribute doesnt seem to exist any more instead of returning empty string.
#so better check for existence here before accessing it.
abiflags = ''
if hasattr(sys, 'abiflags'):
    abiflags = sys.abiflags

# e.g. "37m" -- major + minor + ABI flags, used in the artifact name.
v = str(sys.version_info.major) + str(sys.version_info.minor) + abiflags

name = 'mac'
if is_linux:
    name = 'linux'
elif is_windows:
    name = 'win'

chdir(getcwd()+"//sources")

#TODO, blenders (or a compatible) python bin needs to be in $PATH, and if you use blender's you need to copy the python includes from SVN
#into the include folder of blenders python, too
# NOTE(review): pure orchestration (network downloads, git, heroku CLI,
# shell commands via `sh`); relies on module-level names defined elsewhere:
# parent_dir, heroku_url, server_source_directory_name,
# heroku_server_directory_name, user_name, delete_heroku_server.
def setup_heroku_server(task_name):
    """Provision a Heroku app for *task_name* and return its public URL.

    Downloads the Heroku CLI if needed, copies the server sources into a
    task-specific directory, creates (or re-creates) the Heroku app, enables
    WebSockets, pushes the code, scales one web dyno, and cleans up the
    local staging files.
    """
    print("Heroku: Collecting files...")
    # Install Heroku CLI
    os_name = None
    bit_architecture = None

    # Get the platform we are working on
    platform_info = platform.platform()
    if 'Darwin' in platform_info:  # Mac OS X
        os_name = 'darwin'
    elif 'Linux' in platform_info:  # Linux
        os_name = 'linux'
    else:
        os_name = 'windows'

    # Find our architecture
    bit_architecture_info = platform.architecture()[0]
    if '64bit' in bit_architecture_info:
        bit_architecture = 'x64'
    else:
        bit_architecture = 'x86'

    # Remove existing heroku client files
    existing_heroku_directory_names = glob.glob(
        os.path.join(parent_dir, 'heroku-cli-*')
    )
    if len(existing_heroku_directory_names) == 0:
        if os.path.exists(os.path.join(parent_dir, 'heroku.tar.gz')):
            os.remove(os.path.join(parent_dir, 'heroku.tar.gz'))

        # Get the heroku client and unzip
        os.chdir(parent_dir)
        sh.wget(
            shlex.split(
                '{}-{}-{}.tar.gz -O heroku.tar.gz'.format(
                    heroku_url, os_name, bit_architecture
                )
            )
        )
        sh.tar(shlex.split('-xvzf heroku.tar.gz'))

    heroku_directory_name = glob.glob(os.path.join(parent_dir, 'heroku-cli-*'))[0]
    heroku_directory_path = os.path.join(parent_dir, heroku_directory_name)
    heroku_executable_path = os.path.join(heroku_directory_path, 'bin', 'heroku')

    server_source_directory_path = os.path.join(
        parent_dir, server_source_directory_name
    )
    heroku_server_directory_path = os.path.join(
        parent_dir, '{}_{}'.format(heroku_server_directory_name, task_name)
    )

    # Delete old server files
    sh.rm(shlex.split('-rf ' + heroku_server_directory_path))

    # Copy over a clean copy into the server directory
    shutil.copytree(server_source_directory_path, heroku_server_directory_path)

    print("Heroku: Starting server...")

    os.chdir(heroku_server_directory_path)
    sh.git('init')

    # get heroku credentials
    heroku_user_identifier = None
    while not heroku_user_identifier:
        try:
            subprocess.check_output(shlex.split(heroku_executable_path + ' auth:token'))
            # Read the stored Heroku credential from ~/.netrc.
            heroku_user_identifier = netrc.netrc(
                os.path.join(os.path.expanduser("~"), '.netrc')
            ).hosts['api.heroku.com'][0]
        except subprocess.CalledProcessError:
            raise SystemExit(
                'A free Heroku account is required for launching MTurk tasks. '
                'Please register at https://signup.heroku.com/ and run `{} '
                'login` at the terminal to login to Heroku, and then run this '
                'program again.'.format(heroku_executable_path)
            )

    # Heroku app names are limited to 30 chars and must not end with '-'.
    heroku_app_name = (
        '{}-{}-{}'.format(
            user_name,
            task_name,
            hashlib.md5(heroku_user_identifier.encode('utf-8')).hexdigest(),
        )
    )[:30]
    while heroku_app_name[-1] == '-':
        heroku_app_name = heroku_app_name[:-1]

    # Create or attach to the server
    try:
        subprocess.check_output(
            shlex.split('{} create {}'.format(heroku_executable_path, heroku_app_name)),
            stderr=subprocess.STDOUT,
        )
    except subprocess.CalledProcessError as e:
        error_text = bytes.decode(e.output)
        if "Name is already taken" in error_text:  # already running this app
            do_continue = input(
                'An app is already running with that name, do you want to '
                'restart a new run with it? (y/N): '
            )
            if do_continue != 'y':
                raise SystemExit('User chose not to re-run the app.')
            else:
                delete_heroku_server(task_name)
                try:
                    subprocess.check_output(
                        shlex.split(
                            '{} create {}'.format(
                                heroku_executable_path, heroku_app_name
                            )
                        ),
                        stderr=subprocess.STDOUT,
                    )
                except subprocess.CalledProcessError as e:
                    error_text = bytes.decode(e.output)
                    sh.rm(shlex.split('-rf {}'.format(heroku_server_directory_path)))
                    print(error_text)
                    raise SystemExit(
                        'Something unexpected happened trying to set up the '
                        'heroku server - please use the above printed error '
                        'to debug the issue however necessary.'
                    )
        elif "Delete some apps" in error_text:  # too many apps running
            sh.rm(shlex.split('-rf {}'.format(heroku_server_directory_path)))
            raise SystemExit(
                'You have hit your limit on concurrent apps with heroku, '
                'which are required to run multiple concurrent tasks.\nPlease '
                'wait for some of your existing tasks to complete. If you '
                'have no tasks running, login to heroku.com and delete some '
                'of the running apps or verify your account to allow more '
                'concurrent apps.'
            )
        else:
            sh.rm(shlex.split('-rf {}'.format(heroku_server_directory_path)))
            print(error_text)
            raise SystemExit(
                'Something unexpected happened trying to set up the heroku '
                'server - please use the above printed error to debug the '
                'issue however necessary.'
            )

    # Enable WebSockets
    try:
        subprocess.check_output(
            shlex.split(
                '{} features:enable http-session-affinity'.format(
                    heroku_executable_path
                )
            )
        )
    except subprocess.CalledProcessError:  # Already enabled WebSockets
        pass

    # commit and push to the heroku server
    os.chdir(heroku_server_directory_path)
    sh.git(shlex.split('add -A'))
    sh.git(shlex.split('commit -m "app"'))
    sh.git(shlex.split('push -f heroku master'))

    subprocess.check_output(
        shlex.split('{} ps:scale web=1'.format(heroku_executable_path))
    )
    os.chdir(parent_dir)

    # Clean up heroku files
    if os.path.exists(os.path.join(parent_dir, 'heroku.tar.gz')):
        os.remove(os.path.join(parent_dir, 'heroku.tar.gz'))

    sh.rm(shlex.split('-rf {}'.format(heroku_server_directory_path)))

    return 'https://{}.herokuapp.com'.format(heroku_app_name)
# NOTE(review): depends on many project-level names defined elsewhere
# (get_uptime, get_system_locale, _fix_version, get_utility, IAppInfo,
# _get_revision, get_product_key, get_default_store, sysparam,
# get_main_cnpj, InstalledPlugin, _tracebacks); best-effort collection --
# most sections deliberately swallow failures so a report is always produced.
def collect_report():
    """Assemble a diagnostic report dict: date/uptime/locale, Python and
    OS details, application and dependency versions, database info,
    de-duplicated tracebacks, and the application log."""
    report_ = {}

    # Date and uptime
    report_['date'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    report_['tz'] = time.tzname
    report_['uptime'] = get_uptime()
    report_['locale'] = get_system_locale()

    # Python and System
    import platform
    report_['architecture'] = ' '.join(platform.architecture())
    report_['python_version'] = _fix_version(sys.version_info)
    report_['uname'] = ' '.join(platform.uname())
    report_['system'] = platform.system()
    # platform.dist() was removed in Python 3.8, hence the hasattr guard.
    if hasattr(platform, 'dist'):
        report_['distribution'] = ' '.join(platform.dist())

    # Stoq application
    info = get_utility(IAppInfo, None)
    if info and info.get('name'):
        report_['app_name'] = info.get('name')
        report_['app_version'] = _fix_version(info.get('ver'))

    # External dependencies
    try:
        import gi
    except ImportError:
        pass
    else:
        report_['gtk_version'] = _fix_version(gi.version_info)

    import kiwi
    report_['kiwi_version'] = _fix_version(
        kiwi.__version__.version + (_get_revision(kiwi), ))

    import psycopg2
    try:
        # psycopg2.__version__ looks like "2.8.6 (dt dec pq3 ext lo64)".
        parts = psycopg2.__version__.split(' ')
        extra = ' '.join(parts[1:])
        report_['psycopg_version'] = _fix_version(
            list(map(int, parts[0].split('.'))) + [extra])
    except Exception:
        report_['psycopg_version'] = _fix_version(psycopg2.__version__)

    import reportlab
    report_['reportlab_version'] = _fix_version(reportlab.Version)

    import stoqdrivers
    report_['stoqdrivers_version'] = _fix_version(
        stoqdrivers.__version__ + (_get_revision(stoqdrivers), ))

    report_['product_key'] = get_product_key()

    try:
        from stoqlib.lib.kiwilibrary import library
        report_['bdist_type'] = library.bdist_type
    except Exception:
        pass

    # PostgreSQL database server
    try:
        from stoqlib.database.settings import get_database_version
        default_store = get_default_store()
        report_['postgresql_version'] = _fix_version(
            get_database_version(default_store))
        report_['demo'] = sysparam.get_bool('DEMO_MODE')
        report_['hash'] = sysparam.get_string('USER_HASH')
        report_['cnpj'] = get_main_cnpj(default_store)
        report_['plugins'] = ', '.join(
            InstalledPlugin.get_plugin_names(default_store))
    except Exception:
        pass

    # Tracebacks
    report_['tracebacks'] = {}
    for i, trace in enumerate(_tracebacks):
        t = ''.join(traceback.format_exception(*trace))
        # Eliminate duplicates:
        md5sum = hashlib.md5(t.encode()).hexdigest()
        report_['tracebacks'][md5sum] = t

    if info and info.get('log'):
        # NOTE(review): the log file handle is never closed -- acceptable in
        # a crash reporter, but worth confirming.
        report_['log'] = open(info.get('log')).read()
        report_['log_name'] = info.get('log')

    return report_
def on_x64():
    """Return True when the interpreter is a 64-bit build."""
    bits, _linkage = platform.architecture()
    return bits == '64bit'
def main(): bits = int(platform.architecture()[0][0:2]) python_major_minor = '{}{}'.format( sys.version_info.major, sys.version_info.minor ) # WARNING: The compiler for Python 3.4 is actually 10 but let's try 12 # because that's what Qt offers msvc_versions = {'34': '12', '35': '14', '36': '14'} msvc_version = msvc_versions[python_major_minor] vs_path = os.path.join( 'C:/', 'Program Files (x86)', 'Microsoft Visual Studio {}.0'.format( msvc_version ) ) os.environ = get_environment_from_batch_command( [ os.path.join(vs_path, 'VC', 'vcvarsall.bat'), {32: 'x86', 64: 'x64'}[bits] ], initial=os.environ ) compiler_name = 'msvc' compiler_year = { '9': '2008', '10': '2010', '11': '2012', '12': '2013', '14': '2015', '14.1': '2017', }[msvc_version] compiler_bits_string = {32: '', 64: '_64'}[bits] compiler_dir = ''.join((compiler_name, compiler_year, compiler_bits_string)) qt_bin_path = os.path.join(os.environ['QT_BASE_PATH'], compiler_dir, 'bin') os.environ['PATH'] = os.pathsep.join((os.environ['PATH'], qt_bin_path)) with open('setup.cfg', 'w') as cfg: plat_names = { 32: 'win32', 64: 'win_amd64' } try: plat_name = plat_names[bits] except KeyError: raise Exception('Bit depth {bits} not recognized {}'.format(plat_names.keys())) python_tag = 'cp{major}{minor}'.format( major=sys.version_info[0], minor=sys.version_info[1], ) cfg.write( '''[bdist_wheel] python-tag = {python_tag} plat-name = {plat_name}'''.format(**locals())) build = os.environ['APPVEYOR_BUILD_FOLDER'] destination = os.path.join(build, 'pyqt5-tools') os.makedirs(destination, exist_ok=True) build_id = os.environ['APPVEYOR_BUILD_ID'] with open(os.path.join(destination, 'build_id'), 'w') as f: f.write(build_id + '\n') job_id = os.environ['APPVEYOR_JOB_ID'] with open(os.path.join(destination, 'job_id'), 'w') as f: f.write(job_id + '\n') windeployqt_path = os.path.join(qt_bin_path, 'windeployqt.exe'), application_paths = glob.glob(os.path.join(qt_bin_path, '*.exe')) os.makedirs(destination, exist_ok=True) for application 
in application_paths: application_path = os.path.join(qt_bin_path, application) print('\n\nChecking: {}'.format(os.path.basename(application))) try: output = subprocess.check_output( [ windeployqt_path, application_path, '--dry-run', '--list', 'source', ], cwd=destination, ) except subprocess.CalledProcessError: continue if b'WebEngine' in output: print(' skipped') continue shutil.copy(application_path, destination) report_and_check_call( command=[ windeployqt_path, os.path.basename(application), ], cwd=destination, ) platform_path = os.path.join(destination, 'platforms') os.makedirs(platform_path, exist_ok=True) for platform_plugin in ('minimal',): shutil.copy( os.path.join( os.environ['QT_BASE_PATH'], compiler_dir, 'plugins', 'platforms', 'q{}.dll'.format(platform_plugin), ), platform_path, ) sysroot = os.path.join(build, 'sysroot') os.makedirs(sysroot) nmake = os.path.join(vs_path, 'VC', 'BIN', 'nmake'), qmake = os.path.join(qt_bin_path, 'qmake.exe') print('qmake: {}'.format(qmake)) src = os.path.join(build, 'src') os.makedirs(src) venv_bin = os.path.join(build, 'venv', 'Scripts') native = os.path.join(sysroot, 'native') os.makedirs(native) report_and_check_call( command=[ os.path.join(venv_bin, 'pyqtdeploycli'), '--sysroot', sysroot, '--package', 'python', '--system-python', '.'.join(python_major_minor), 'install', ], ) pyqt5_version = os.environ['PYQT5_VERSION'] # sip_version = next( # d.version # for d in pip.utils.get_installed_distributions() # if d.project_name == 'sip' # ) sip_version = { '5.5.1': '4.17', '5.6': '4.19', '5.7.1': '4.19', '5.8.2': '4.19.2', '5.9': '4.19.7', '5.10': '4.19.7', }[pyqt5_version] sip_name = 'sip-{}'.format(sip_version) r = requests.get( 'http://downloads.sourceforge.net' '/project/pyqt/sip/sip-{}/{}.zip'.format( sip_version, sip_name ) ) z = zipfile.ZipFile(io.BytesIO(r.content)) z.extractall(path=src) sip = os.path.join(src, sip_name) native_sip = sip + '-native' shutil.copytree(os.path.join(src, sip_name), native_sip) 
# Point the MSVC compiler at the sysroot's Python headers for the native
# sip build below.
os.environ['CL'] = '/I"{}\\include\\python{}"'.format(
    sysroot,
    '.'.join(python_major_minor)
)

# qmake mkspec names: VS2013 reuses the win32-msvc2010 spec.
year = compiler_year
if year == '2013':
    year = '2010'

# --- Build the native (host) sip: configure, nmake, nmake install ---
report_and_check_call(
    command=[
        os.path.join(venv_bin, 'python'),
        'configure.py',
        '--static',
        '--sysroot={}'.format(native),
        '--platform=win32-{}{}'.format(compiler_name, year),
        '--target-py-version={}'.format('.'.join(python_major_minor)),
    ],
    cwd=native_sip,
)

report_and_check_call(
    command=[
        nmake,
    ],
    cwd=native_sip,
    env=os.environ,
)

report_and_check_call(
    command=[
        nmake,
        'install',
    ],
    cwd=native_sip,
    env=os.environ,
)

# --- Build the target sip against the sysroot, driven by pyqtdeploycli ---
report_and_check_call(
    command=[
        os.path.join(venv_bin, 'pyqtdeploycli'),
        '--package', 'sip',
        '--target', 'win-{}'.format(bits),
        'configure',
    ],
    cwd=sip,
)

report_and_check_call(
    command=[
        os.path.join(venv_bin, 'python'),
        'configure.py',
        '--static',
        '--sysroot={}'.format(sysroot),
        '--no-tools',
        '--use-qmake',
        '--configuration=sip-win.cfg',
        '--platform=win32-{}{}'.format(compiler_name, year),
        '--target-py-version={}'.format('.'.join(python_major_minor)),
    ],
    cwd=sip,
)

report_and_check_call(
    command=[
        qmake,
    ],
    cwd=sip,
    env=os.environ,
)

report_and_check_call(
    command=[
        nmake,
    ],
    cwd=sip,
    env=os.environ,
)

report_and_check_call(
    command=[
        nmake,
        'install',
    ],
    cwd=sip,
    env=os.environ,
)

# --- Download and unpack the PyQt5 source matching PYQT5_VERSION ---
# The source archive was renamed starting with 5.6.
if tuple(int(x) for x in pyqt5_version.split('.')) >= (5, 6):
    pyqt5_name = 'PyQt5_gpl-{}'.format(pyqt5_version)
else:
    pyqt5_name = 'PyQt-gpl-{}'.format(pyqt5_version)

r = requests.get(
    'http://downloads.sourceforge.net'
    '/project/pyqt/PyQt5/PyQt-{}/{}.zip'.format(
        pyqt5_version,
        pyqt5_name
    )
)
z = zipfile.ZipFile(io.BytesIO(r.content))
z.extractall(path=src)

pyqt5 = os.path.join(src, pyqt5_name)

# TODO: make a patch for the lower versions as well
if tuple(int(x) for x in pyqt5_version.split('.')) >= (5, 7):
    report_and_check_call(
        command='patch -p 1 < ..\\..\\pluginloader.patch',
        shell=True,  # TODO: don't do this
        cwd=pyqt5,
    )

# Generate the pyqtdeploy configuration for PyQt5 itself.
report_and_check_call(
    command=[
        os.path.join(venv_bin, 'pyqtdeploycli'),
        '--package', 'pyqt5',
        '--target', 'win-{}'.format(bits),
        'configure',
    ],
    cwd=pyqt5,
)

# Rewrite pyqt5-win.cfg: prepend the shared-library name and patch the
# py_pylib_lib entry so the link line matches the pythonXY naming scheme.
pyqt5_cfg = os.path.join(pyqt5, 'pyqt5-win.cfg')
with open(pyqt5_cfg) as f:
    original = io.StringIO(f.read())
with open(pyqt5_cfg, 'w') as f:
    # BUG FIX: formatting the tuple directly produced e.g.
    # "python('3', '5').dll"; join the digits to get "python35.dll",
    # matching the py_pylib_lib = python%(py_major)%(py_minor) entry below.
    f.write('\npy_pyshlib = python{}.dll\n'.format(
        ''.join(python_major_minor),
    ))
    for line in original:
        if line.startswith('py_pylib_lib'):
            f.write('py_pylib_lib = python%(py_major)%(py_minor)\n')
        else:
            f.write(line)

# Embed the Python shared-library name into the Designer plugin build.
designer_pro = os.path.join(pyqt5, 'designer', 'designer.pro-in')
with open(designer_pro, 'a') as f:
    f.write('\nDEFINES += PYTHON_LIB=\'"\\\\\\"@PYSHLIB@\\\\\\""\'\n')

# --- Configure and build PyQt5 (QtDesigner plugin only) ---
command = [
    os.path.join(venv_bin, 'python'),
    r'configure.py',
    r'--static',
    r'--sysroot={}'.format(sysroot),
    r'--no-tools',
    r'--no-qsci-api',
    r'--no-qml-plugin',
    r'--configuration={}'.format(pyqt5_cfg),
    r'--confirm-license',
    r'--sip={}\sip.exe'.format(native),
    r'--bindir={}\pyqt5-install\bin'.format(sysroot),
    r'--destdir={}\pyqt5-install\dest'.format(sysroot),
    r'--designer-plugindir={}\pyqt5-install\designer'.format(sysroot),
    r'--enable=QtDesigner',
    '--target-py-version={}'.format('.'.join(python_major_minor)),
]
# --qmake is only accepted by the 5.6+ configure.py.
if tuple(int(x) for x in pyqt5_version.split('.')) >= (5, 6):
    command.append(r'--qmake={}'.format(qmake))

report_and_check_call(
    command=command,
    cwd=pyqt5,
    env=os.environ,
)

report_and_check_call(
    command=[
        qmake
    ],
    cwd=pyqt5,
    env=os.environ,
)

sys.stderr.write('another stderr test from {}\n'.format(__file__))

report_and_check_call(
    command=[
        nmake,
    ],
    cwd=pyqt5,
    env=os.environ,
)

report_and_check_call(
    command=[
        nmake,
        'install',
    ],
    cwd=pyqt5,
    env=os.environ,
)

# Copy the built Designer plugin and the PyQt5 license into the
# distribution directory.
designer_plugin_path = os.path.join(
    sysroot, 'pyqt5-install', 'designer', 'pyqt5.dll')
designer_plugin_path = os.path.expandvars(designer_plugin_path)
designer_plugin_destination = os.path.join(
    destination, 'plugins', 'designer')
os.makedirs(designer_plugin_destination, exist_ok=True)
shutil.copy(designer_plugin_path, designer_plugin_destination)
shutil.copy(os.path.join(pyqt5, 'LICENSE'),
            os.path.join(destination, 'LICENSE.pyqt5'))

# Since windeployqt doesn't actually work with --compiler-runtime,
# copy it ourselves
plat = {32: 'x86', 64: 'x64'}[bits]
redist_path = os.path.join(
    vs_path,
    'VC',
    'redist',
    plat,
    'Microsoft.VC{}0.CRT'.format(msvc_version)
)
redist_files = [
    'msvcp{}0.dll'.format(msvc_version),
]
# MSVC 14+ renamed the C runtime DLL from msvcrXX0 to vcruntimeXX0.
if int(msvc_version) >= 14:
    redist_files.append('vcruntime{}0.dll'.format(msvc_version))
else:
    redist_files.append('msvcr{}0.dll'.format(msvc_version))
for file in redist_files:
    dest = os.path.join(destination, file)
    shutil.copyfile(os.path.join(redist_path, file), dest)
    # Redist source files are read-only; make our copies writable.
    os.chmod(dest, stat.S_IWRITE)

# Write a small license/attribution file for the redistributed runtime.
redist_license = os.path.join('pyqt5-tools',
                              'REDIST.visual_cpp_build_tools')
redist_license_html = redist_license + '.html'
with open(redist_license, 'w') as redist:
    redist.write(
        '''The following filings are being distributed under the Microsoft Visual C++ Build Tools license linked below.

{files}

https://www.visualstudio.com/en-us/support/legal/mt644918

For a local copy see:

{license_file}
'''.format(files='\n'.join(redist_files),
           license_file=os.path.basename(redist_license_html)))

# Fetch a local HTML copy of the EULA to ship alongside.
r = requests.get('https://www.visualstudio.com/DownloadEula/en-us/mt644918')
c = io.StringIO(r.text)
with open(redist_license_html, 'w') as f:
    f.write(c.read())
def arch():
    """Return the interpreter's bitness string (e.g. '32bit' or '64bit')."""
    bits, _linkage = platform.architecture()
    return str(bits)
def find_libclang_dir(clang_binary, libclang_path):
    """Find directory with libclang.

    Args:
        clang_binary (str): clang binary to call
        libclang_path (str): libclang path provided by user.
            Does not have to be valid.
    Returns:
        str: folder with libclang, or None if it could not be located
    """
    # stdin/stderr are only redirected on Windows (set below) to keep the
    # child console window from flashing.
    stdin = None
    stderr = None
    log.debug(" platform: %s", platform.architecture())
    log.debug(" python version: %s", platform.python_version())
    current_system = platform.system()
    log.debug(" we are on '%s'", platform.system())
    log.debug(" user provided libclang_path: %s", libclang_path)
    # Get version string for help finding the proper libclang library on Linux
    if libclang_path:
        # User thinks he knows better. Let him try his luck.
        libclang_dir = ClangUtils.try_load_from_user_hint(libclang_path)
        if libclang_dir:
            # It was found! No need to search any further!
            ClangUtils.libclang_name = path.basename(libclang_path)
            log.info(" using user-provided libclang: '%s'", libclang_path)
            return libclang_dir
    # If the user hint did not work, we look for it normally
    if current_system == "Linux":
        # e.g. "3.8.0" -> "3.8"; only used in the "$version" substitution below.
        version_str = settings_storage.SettingsStorage.CLANG_VERSION[:-2]
    # Try every (name, suffix) combination known for this OS.
    for suffix in ClangUtils.suffixes[current_system]:
        # pick a name for a file
        for name in ClangUtils.possible_filenames[current_system]:
            # NOTE: `file` shadows the Python 2 builtin of the same name.
            file = "{name}{suffix}".format(name=name, suffix=suffix)
            log.debug(" searching for: '%s'", file)
            startupinfo = None
            # let's find the library
            if platform.system() == "Darwin":
                # [HACK]: wtf??? why does it not find libclang.dylib?
                get_library_path_cmd = [clang_binary, "-print-file-name="]
            elif platform.system() == "Windows":
                get_library_path_cmd = [clang_binary, "-print-prog-name="]
                # Don't let console window pop-up briefly.
                startupinfo = subprocess.STARTUPINFO()
                startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
                startupinfo.wShowWindow = subprocess.SW_HIDE
                stdin = subprocess.PIPE
                stderr = subprocess.PIPE
            elif platform.system() == "Linux":
                file = file.replace("$version", version_str)
                get_library_path_cmd = [
                    clang_binary, "-print-file-name={}".format(file)
                ]
            # Ask clang itself where the library lives.
            output = subprocess.check_output(
                get_library_path_cmd,
                stdin=stdin,
                stderr=stderr,
                startupinfo=startupinfo).decode('utf8').strip()
            log.debug(" libclang search output = '%s'", output)
            if output:
                libclang_dir = ClangUtils.dir_from_output(output)
                if path.isdir(libclang_dir):
                    full_libclang_path = path.join(libclang_dir, file)
                    if path.exists(full_libclang_path):
                        log.info(" found libclang library file: '%s'",
                                 full_libclang_path)
                        # Remember which filename worked for later loading.
                        ClangUtils.libclang_name = file
                        return libclang_dir
            log.warning(" clang could not find '%s'", file)
    # if we haven't found anything there is nothing to return
    log.error(" no libclang found at all")
    return None
def get_architecture():
    """Return the operating system bitness info.

    Returns the (bits, linkage) tuple produced by platform.architecture().
    """
    arch_info = platform.architecture()
    return arch_info
def on_x86():
    """Return True when the interpreter reports a 32-bit architecture."""
    bits, _linkage = platform.architecture()
    return bits == '32bit'
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
import atexit
import json
import os
import platform
import re
import shlex
import shutil
import subprocess
import sys
import tempfile

# Interpreter/platform feature flags used throughout the script.
py3 = sys.version_info[0] > 2                      # running under Python 3
is64bit = platform.architecture()[0] == '64bit'    # 64-bit interpreter build
is_macos = 'darwin' in sys.platform.lower()

# Detect whether we are executing from a real file; __file__ is undefined
# when the script is e.g. piped into the interpreter.
try:
    __file__
    from_file = True
except NameError:
    from_file = False

if py3:
    # Compatibility aliases so the rest of the script can use Python 2 names.
    unicode = str
    raw_input = input
    import urllib.request as urllib

    def encode_for_subprocess(x):
        """Return *x* unchanged; Python 3 subprocess accepts str arguments."""
        return x
cixtype = arg[eqsign + 1:len(arg)] elif arg.lower().startswith("synonyms="): eqsign = arg.find('=') synonyms = arg[eqsign + 1:len(arg)] else: where = where + ' ' + arg else: raise Exception( "xsDisplayWhere-F-001: Usage: xsDisplayWhere [alfacut=value] field [IN context] is [modifier, ...] concept [and, or]" ) if where == ' ': raise Exception("xsDisplayWhere-F-003: No query") binary = os.path.dirname(sys.argv[0]) + "/" + platform.system( ) + "/" + platform.architecture()[0] + "/xsDisplayWhere" if (platform.system() == 'Windows'): binary = binary + ".exe" if not os.path.isfile(binary): raise Exception("xsDisplayWhere-F-000: Can't find binary file " + binary) subprocess.call([ binary, '-a', alfacut, '-c', cixtype, '-n', cixname, '-s', synonyms, '-w', where ]) if platform.system() == 'Windows': sys.stdout.flush() time.sleep(1.0)
def version():
    """Return a string with various version information.

    Builds a human-readable multi-line report covering qutebrowser itself,
    the Python/Qt/PyQt stack, loaded modules, platform details and paths.
    """
    lines = ["qutebrowser v{}".format(qutebrowser.__version__)]
    gitver = _git_str()
    if gitver is not None:
        lines.append("Git commit: {}".format(gitver))

    lines.append("Backend: {}".format(_backend()))

    # Interpreter and Qt bindings versions.
    lines += [
        '',
        '{}: {}'.format(platform.python_implementation(),
                        platform.python_version()),
        'Qt: {}'.format(qt_version()),
        'PyQt: {}'.format(PYQT_VERSION_STR),
        '',
    ]

    lines += _module_versions()

    lines += ['pdf.js: {}'.format(_pdfjs_version())]

    lines += [
        'SSL: {}'.format(QSslSocket.sslLibraryVersionString()),
        '',
    ]

    # Style info is only available once a QApplication exists.
    qapp = QApplication.instance()
    if qapp:
        style = qapp.style()
        lines.append('Style: {}'.format(style.metaObject().className()))

    importpath = os.path.dirname(os.path.abspath(qutebrowser.__file__))

    lines += [
        'Platform: {}, {}'.format(platform.platform(),
                                  platform.architecture()[0]),
    ]
    dist = distribution()
    if dist is not None:
        lines += [
            'Linux distribution: {} ({})'.format(dist.pretty,
                                                 dist.parsed.name)
        ]

    lines += [
        'Frozen: {}'.format(hasattr(sys, 'frozen')),
        "Imported from {}".format(importpath),
        "Qt library executable path: {}, data path: {}".format(
            QLibraryInfo.location(QLibraryInfo.LibraryExecutablesPath),
            QLibraryInfo.location(QLibraryInfo.DataPath))
    ]

    # Fall back to generic OS info when the distribution is unknown.
    if not dist or dist.parsed == Distribution.unknown:
        lines += _os_info()

    lines += [
        '',
        'Paths:',
    ]
    for name, path in _path_info().items():
        lines += ['{}: {}'.format(name, path)]

    return '\n'.join(lines)
FAIL = '\033[91m' ENDC = '\033[0m' BOLD = '\033[1m' UNDERLINE = '\033[4m' for key in profile: if hasattr(pl, key): print(key + bcolors.BOLD + ": " + str(getattr(pl, key)()) + bcolors.ENDC) """ import platform os = platform.system() print(os) print(platform.platform()) print(platform.version()) print(platform.architecture()) ''' python中,platform模块给我们提供了很多方法去获取操作系统的信息 如: import platform platform.platform() #获取操作系统名称及版本号,'Linux-3.13.0-46-generic-i686-with-Deepin-2014.2-trusty' platform.version() #获取操作系统版本号,'#76-Ubuntu SMP Thu Feb 26 18:52:49 UTC 2015' platform.architecture() #获取操作系统的位数,('32bit', 'ELF') platform.machine() #计算机类型,'i686' platform.node() #计算机的网络名称,'XF654' platform.processor() #计算机处理器信息,''i686' platform.uname() #包含上面所有的信息汇总,('Linux', 'XF654', '3.13.0-46-generic', '#76-Ubuntu SMP Thu Feb 26 18:52:49 UTC 2015', 'i686', 'i686') 还可以获得计算机中python的一些信息: import platform
os.makedirs(UUID_VAR_PATH, exist_ok=True) if os.path.exists(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')): os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')) os.symlink(os.path.join(ZULIP_PATH, 'README.md'), os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')) os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')) except OSError: print(FAIL + "Error: Unable to create symlinks." "Make sure you have permission to create symbolic links." + ENDC) print("See this page for more information:") print( " https://zulip.readthedocs.io/en/latest/development/setup-vagrant.html#os-symlink-error" ) sys.exit(1) if platform.architecture()[0] == '64bit': arch = 'amd64' elif platform.architecture()[0] == '32bit': arch = "i386" else: logging.critical( "Only x86 is supported;" " ask on chat.zulip.org if you want another architecture.") # Note: It's probably actually not hard to add additional # architectures. sys.exit(1) distro_info = parse_os_release() vendor = distro_info['ID'] os_version = distro_info['VERSION_ID'] if vendor == "debian" and os_version == "10": # buster
def Architecture(self):
    """Return the interpreter's bitness string, e.g. '32bit' or '64bit'."""
    bits, _linkage = platform.architecture()
    return bits
new_following # In[32]: driver.quit() # Doc : # - Regex hastag : # https://stackoverflow.com/questions/38506598/regular-expression-to-match-hashtag-but-not-hashtag-with-semicolon # https://stackoverflow.com/questions/2527892/parsing-a-tweet-to-extract-hashtags-into-an-array-in-python # # # - Run script when booting, automatically : # linux reboot and lauch a program with terminal # https://unix.stackexchange.com/questions/19634/what-is-the-linux-equivalent-of-windows-startup # https://stackoverflow.com/questions/7221757/run-automatically-program-on-startup-under-linux-ubuntu # https://stackoverflow.com/questions/24518522/run-python-script-at-startup-in-ubuntu # # In[5]: import platform platform.architecture() # In[22]: import time time.localtime() # In[ ]:
lastItem = item return r CCFX_PREPDIR = ".ccfxprepdir" # walkaround to avoid \x5c character problem in os.path.split def os_path_split(pathstr, cnv): s = cnv.decode(pathstr) d, f = os.path.split(s) return cnv.encode(d), cnv.encode(f) # walkaround to avoid limit of length of file path in Windows if platform.architecture() == ('32bit', 'WindowsPE'): import win32getshortname __converter_file_funcs = easytorq.ICUConverter() __converter_file_funcs.setencoding("char") def __shorten(filepath): dirName, fileName = os_path_split(filepath, __converter_file_funcs) shortDirName = win32getshortname.getShortPathName( dirName) # may return None if shortDirName: fp = os.path.join(shortDirName, fileName) if fp: return fp return filepath
def get_bitness():
    """Return '32' or '64' for the interpreter's bitness, or None if unknown."""
    bits, _linkage = architecture()
    return {'32bit': '32', '64bit': '64'}.get(bits)
os.system(sysnative + '\\secedit /refreshpolicy machine_policy /enforce') else: os.system(sysnative + '\\gpupdate /target:computer /force') if '__main__' == __name__: if os.name != 'nt': sys.exit(0) windir = os.environ.get('windir') if not windir: sys.exit(-1) if not is_admin(): runas() sys.exit(0) sys64 = os.path.exists(windir + '\\SysWOW64') pe32 = platform.architecture()[0] == '32bit' sysver = platform.version() if sys64 and sysver < '6': # Windows Server 2003 and Windows XP has no Sysnative alias if os.path.exists('reset_gp.exe'): os.system('reset_gp.exe') else: print("Unable to check Teredo's Group Policy, please check it manually") else: sysalias = 'Sysnative' if sys64 and pe32 else 'System32' sysnative = '%s\\%s' % (windir, sysalias) gp_regpol_file = sysnative + '\\GroupPolicy\\Machine\\Registry.pol' reset_teredo()
import nibabel from nibabel import Nifti1Image import numpy as np from numpy.testing import assert_array_equal, assert_allclose from nilearn._utils.testing import assert_raises_regex from nilearn.image import image from nilearn.image import resampling from nilearn.image import concat_imgs from nilearn._utils import testing, niimg_conversions from nilearn.image import new_img_like from nilearn.image import threshold_img from nilearn.image import iter_img from nilearn.image import math_img X64 = (platform.architecture()[0] == '64bit') currdir = os.path.dirname(os.path.abspath(__file__)) datadir = os.path.join(currdir, 'data') def test_high_variance_confounds(): # See also test_signals.test_high_variance_confounds() # There is only tests on what is added by image.high_variance_confounds() # compared to signal.high_variance_confounds() shape = (40, 41, 42) length = 17 n_confounds = 10 img, mask_img = testing.generate_fake_fmri(shape=shape, length=length)
#!/usr/bin/env python ## ############### ## @author kidd ## ## Separate out Xerces constants ## so that they can be easily ## found by other apps. import platform Xerces = { 'Darwin' : ('xerces-c-3.1.1-x86-macosx-gcc-4.0', 'xerces-c-3.1.1-x86-macosx-gcc-4.0'), 'Linux' : ('xerces-c-3.1.1-x86-linux-gcc-3.4', 'xerces-c-3.1.1-x86_64-linux-gcc-3.4'), 'Windows' : ('xerces-c-3.1.1-x86-windows-vc-9.0', 'xerces-c-3.1.1-x86_64-windows-vc-9.0') } (bits,linkage) = platform.architecture() sys = platform.system() IsZip = sys == 'Windows' def get_name(is64): return Xerces[sys][is64]