def MakeEnv(root):
    """Configure the SCons environment *root* for an MPI build (Python 2).

    On Windows, looks for MS-MPI under root['MPIDIR'] and appends the matching
    include/library paths for the detected sub-architecture; otherwise assumes
    a generic MPI layout under MPIDIR. On non-Windows platforms, swaps the
    compilers/linker for the MPI wrappers. Does nothing when
    root['sequential'] is set.
    """
    arch = platform.uname()[0]     # OS name, e.g. 'Windows' / 'Linux'
    subarch = platform.uname()[4]  # machine, e.g. 'AMD64' / 'x86_64'
    if not root['sequential']:
        if arch == 'Windows':
            mpidir = root['MPIDIR']
            # Presence of the 32-bit import library is used as the MS-MPI marker.
            if os.path.exists (mpidir + "\\lib\\i386\\msmpi.lib"):
                print "Found MSMPI in " + mpidir
                ## TODO add support for HPC pack here
                root.Append(CPPPATH = mpidir+"\\Include", LIBS = ["msmpi.lib", "msmpe.lib"] )
                if subarch == 'AMD64':
                    root.Append( LIBPATH = mpidir+"\\Lib\\amd64" )
                else:
                    root.Append( LIBPATH = mpidir+"\\Lib\\i386" )
            else:
                # Fall back to a conventional MPI install layout (e.g. MPICH/OpenMPI).
                print "Found other MPI in " + mpidir
                root.Append( CPPPATH = mpidir+"\\include", LIBPATH = mpidir+"\\lib" , LIBS = ["mpi", "mpi_cxx"] )
        else:
            # POSIX: let the MPI compiler wrappers drive the whole toolchain.
            root.Replace( CXX = root['MPICXX'], LINK = root['MPILINK'], CC = root['MPICC'] )
def print_platform_info():
    """Print a one-line-per-item summary of the host platform (Python 2).

    Relies on module-level helpers (get_os_release, get_arch,
    get_kernel_version) and the `pl` alias for the platform module.
    """
    print pl.system()                # which OS
    print get_os_release()           # which OS release
    print get_arch()                 # which architecture is in use
    print get_kernel_version()       # print kernel version
    print sys.version.split(" ")[0]  # print python version in use
    print pl.uname()[1]              # print computer name
def open(self, app=None):
    """
    Routine to open the rendered image or folder of images from the filesystem.

    Renders first via self.render(). On Windows the default shell association
    is always used (an explicit *app* is not supported there); on Linux a
    chain of viewers is tried non-blockingly; on Darwin `open` is used,
    optionally with -a *app*.
    """
    self.render()
    if not app and self.app:
        app = self.app
    if os.name == 'nt':
        if app:
            self.msg('Overriding default image viewer not supported on Win32')
        call('start %s' % self.image.replace('/','\\'))
    elif platform.uname()[0] == 'Linux':
        if app:
            call('%s %s' % (app,self.image))
        else:
            # make blind and dumb attempt to open images, but don't block while open
            # (Popen is used instead of call so the viewer runs detached; each
            # OSError means the viewer binary is absent, so try the next one).
            try:
                cmd = 'xdg-open %s' % self.image
                Popen(cmd.split(' '))
            except OSError:
                try:
                    cmd = 'gthumb %s' % self.image
                    Popen(cmd.split(' '))
                except OSError:
                    try:
                        cmd = 'display %s' % self.image
                        Popen(cmd.split(' '))
                    except OSError:
                        # No known viewer available; give up silently.
                        pass
    elif platform.uname()[0] == 'Darwin':
        if app:
            call('open %s -a %s' % (self.image, app))
        else:
            call('open %s' % self.image)
def open_file(file, force_text=False): """Open the file in the platform's native editor/viewer. @param file: The path of the file to open. @type file: str @keyword force_text: A flag which if True will cause a text editor to be launched. @type force_text: bool """ # Windows. if platform.uname()[0] in ['Windows', 'Microsoft']: # Text file. if force_text: os.system('notepad %s' % os.path.normpath(file)) # All other files. else: os.startfile(os.path.normpath(file)) # Mac OS X. elif platform.uname()[0] == 'Darwin': # Text file. if force_text: os.system('open -t %s' % file) # All other files. else: os.system('open %s' % file) # POSIX Systems with xdg-open. else: os.system('/usr/bin/xdg-open %s' % file)
def check_environ(product):
    """Verify the OS hosts file contains every web-platform.test domain.

    Skipped for products ("firefox", "servo") that manage their own host
    resolution. Collects the expected subdomain/not-subdomain host names,
    removes every host already present in the hosts file, and raises
    WptrunError with platform-appropriate remediation instructions if any
    are still missing.
    """
    if product not in ("firefox", "servo"):
        expected_hosts = {".".join(x) for x in serve.get_subdomains("web-platform.test").values()}
        expected_hosts |= {".".join(x) for x in serve.get_not_subdomains("web-platform.test").values()}
        missing_hosts = set(expected_hosts)
        if platform.uname()[0] != "Windows":
            hosts_path = "/etc/hosts"
        else:
            # Raw string: \W, \S, \d, \e, \h are not valid escapes and only
            # passed through by accident before.
            hosts_path = r"C:\Windows\System32\drivers\etc\hosts"
        with open(hosts_path, "r") as f:
            for line in f:
                # Strip comments, then take every host alias on the line
                # (everything after the IP address).
                line = line.split("#", 1)[0].strip()
                parts = line.split()
                hosts = parts[1:]
                for host in hosts:
                    missing_hosts.discard(host)
        if missing_hosts:
            # BUG FIX: the two instruction messages were swapped -- the
            # "Administrator privileges" text belongs to Windows and the
            # "sudo tee" text to POSIX.
            if platform.uname()[0] != "Windows":
                message = """Missing hosts file configuration. Run ./wpt make-hosts-file | sudo tee -a %s""" % hosts_path
            else:
                message = """Missing hosts file configuration. Run python wpt make-hosts-file >> %s from a shell with Administrator privileges.""" % hosts_path
            raise WptrunError(message)
def user_agent():
    """Return the user agent string, e.g. "BleachBit/0.8 (Linux; fedora/11-Leonidas; en_US)".

    Python 2 only: relies on platform.dist(), which was removed in
    Python 3.8.
    """
    __platform = platform.system()  # Linux or Windows
    __os = platform.uname()[2]      # e.g., 2.6.28-12-generic or XP
    if sys.platform == "win32":
        # misleading: Python 2.5.4 shows uname()[2] as Vista on Windows 7
        __os = platform.uname()[3][0:3]  # 5.1 = Windows XP, 6.0 = Vista, 6.1 = 7
    elif sys.platform.startswith('linux'):
        dist = platform.dist()
        # example: ('fedora', '11', 'Leonidas')
        # example: ('', '', '') for Arch Linux
        if 0 < len(dist[0]):
            __os = dist[0] + '/' + dist[1] + '-' + dist[2]
    elif sys.platform[:6] == 'netbsd':
        __sys = platform.system()
        mach = platform.machine()
        rel = platform.release()
        __os = __sys + '/' + mach + ' ' + rel
    __locale = ""
    try:
        import locale
        __locale = locale.getdefaultlocale()[0]  # e.g., en_US
    except:
        # Best effort: locale detection failures are logged, not fatal.
        traceback.print_exc()
    agent = "BleachBit/%s (%s; %s; %s)" % (Common.APP_VERSION, \
        __platform, __os, __locale)
    return agent
def cleanupHtml(self, txt):
    """Run Tidy over the HTML in *txt* (bytes) and strip obsolete tags.

    Writes the markup to doc.htm, runs the platform's tidy binary on it,
    re-reads the result and removes generator/doctype noise. Returns the
    cleaned HTML as bytes.
    """
    def spacerepl(matchobj):
        # NOTE(review): the replacement byte below appears to be a
        # non-breaking space (it may have been mangled in transit) --
        # verify the intended byte.
        return matchobj.group(0).replace(b' ', b' ')
    # Replace spaces inside text nodes (but not inside <style> blocks).
    txt = re.sub(b'>([^<>]+)<(?!/style>)', spacerepl, txt)
    # Write the new file.
    # BUG FIX: txt is bytes, so the scratch file must be opened in binary
    # mode (text mode raises TypeError on Python 3); also close it promptly.
    with open('doc.htm', 'wb') as f:
        f.write(txt)
    # Process the file with Tidy
    if platform.uname()[0].lower() == 'windows':
        p = subprocess.Popen(['tidy.exe', '-config', 'tidy.txt', 'doc.htm']).wait()
    elif platform.uname()[0].lower() == 'linux':
        # Bundled libtidy lives next to the script.
        env = os.environ
        env.update({'LD_LIBRARY_PATH': os.getcwd()})
        p = subprocess.Popen(['./tidy', '-config', 'tidy.txt', 'doc.htm'], env=env).wait()
    else:
        print('Platform `%s` is not supported yet!\n' % platform.uname()[0])
    with open('doc.htm', 'rb') as f:
        txt = f.read()
    # Delete the wrong/ obsolete tags.
    # BUG FIX: the replacement arguments must be bytes (b'') -- replacing
    # bytes with str raises TypeError on Python 3.
    txt = txt.replace(b'<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"\n"http://www.w3.org/TR/html4/loose.dtd">\n', b'')
    txt = txt.replace(b'<meta name="generator" content="HTML Tidy for Windows (vers 25 March 2009), see www.w3.org">\n', b'')
    txt = txt.replace(b'<meta name="generator" content="HTML Tidy for Linux/x86 (vers 25 March 2009), see www.w3.org">\n', b'')
    txt = txt.replace(b'<meta name="generator" content="HTML Tidy for Linux (vers 25 March 2009), see www.w3.org">\n', b'')
    txt = txt.replace(b'<meta name="qrichtext" content="1">\n', b'')
    txt = txt.replace(b'<title></title>\n', b'')
    txt = txt.replace(b'</style>\n\n<style type="text/css">\n', b'')
    txt = txt.replace(b'<br>\n', b'\n')
    # The clean file, for debug...
    with open('doc.htm', 'wb') as f:
        f.write(txt)
    return txt
def load_plugins():
    """ Helper function that attempts to load all the plugins.

    Each optional plugin import is wrapped in try/except so that a missing
    dependency only disables that plugin (logging a warning) instead of
    aborting startup. Environment details are logged first to aid debugging.
    """
    # provide some info about the env in use
    import platform
    log.debug("Python %s %s on %s %s (%s)" % (platform.python_version(), platform.architecture()[0],
                                              platform.uname()[0], platform.uname()[2], platform.uname()[4]))
    import numpy
    log.debug("numpy %s" % numpy.__version__)
    log.debug("matplotlib %s" % matplotlib.__version__)
    log.debug("wxPython %s" % wx.__version__)

    # The core model is mandatory -- let an ImportError propagate.
    from hdf_compass import compass_model

    try:
        from hdf_compass import filesystem_model
    except ImportError:
        log.warning("Filesystem plugin: NOT loaded")
    try:
        from hdf_compass import array_model
    except ImportError:
        log.warning("Array plugin: NOT loaded")
    try:
        from hdf_compass import hdf5_model
        import h5py
        log.debug("h5py %s" % h5py.__version__)
    except ImportError:
        log.warning("HDF5 plugin: NOT loaded")
    try:
        from hdf_compass import bag_model
        from hydroffice import bag
        from lxml import etree
        log.debug("hydroffice.bag %s" % bag.__version__)
        log.debug("lxml %s (libxml %s, libxslt %s)" % (etree.__version__,
                                                       ".".join(str(i) for i in etree.LIBXML_VERSION),
                                                       ".".join(str(i) for i in etree.LIBXSLT_VERSION)))
    except (ImportError, OSError):
        log.warning("BAG plugin: NOT loaded")
    try:
        from hdf_compass import asc_model
    except ImportError:
        log.warning("Ascii grid: plugin NOT loaded")
    try:
        from hdf_compass import opendap_model
        from pydap import lib
        log.debug("pydap %s (protocol %s)" % (".".join(str(i) for i in lib.__version__),
                                              ".".join(str(i) for i in lib.__dap__)))
    except ImportError:
        log.warning("Opendap plugin: NOT loaded")
    # BUG FIX: an unguarded "from hdf_compass import hdf5rest_model" used to
    # precede this try block, so a missing hdf5rest dependency raised before
    # the guard could catch it, aborting the whole plugin load. The guarded
    # import below is sufficient.
    try:
        from hdf_compass import hdf5rest_model
    except ImportError:
        log.warning("HDF5 REST: plugin NOT loaded")
def __init__(self, interface):
    """Classify the host OS, then either connect to *interface* or
    initialize empty 4-channel acquisition state.

    Sets self.osname to one of 'pi', 'unix', 'win', 'win10'.
    """
    if(os.name=='posix'): #unix
        # os.uname()[1] is the hostname; a stock Raspberry Pi reports
        # 'raspberrypi'.
        if(os.uname()[1]=='raspberrypi'):
            self.osname='pi'
        else:
            self.osname='unix'
    else:
        if(platform.uname()[2] == 'XP'):
            self.osname='win'
        else:
            os_ver=int(platform.uname()[2])
            #print 'os_ver=', os_ver
            #if(os_ver >= 10):
            if(os_ver >= 8):
                # You might get wrong OS version here(when OpenWave-2KE.exe is
                # running), especially for Win 10.
                self.osname='win10'
            else:
                self.osname='win'
    if(interface != ''):
        self.connect(interface)
    else:
        # No interface given: default to 4 channels, not connected.
        self.chnum=4
        self.connection_status=0
    global inBuffer
    self.ver=__version__  # Driver version.
    # Per-channel state (4 channels each): waveform samples, volts/div,
    # voltage unit, sample interval, vertical and horizontal position.
    self.iWave=[[], [], [], []]
    self.vdiv=[[], [], [], []]
    self.vunit=[[], [], [], []]
    self.dt=[[], [], [], []]
    self.vpos=[[], [], [], []]
    self.hpos=[[], [], [], []]
    self.ch_list=[]
    self.info=[[], [], [], []]
    generate_lut()
def open(self, app=None):
    """
    Render the image, then open it from the filesystem.

    Uses *app* (or self.app) as the viewer where the platform supports it;
    on Windows the shell association always wins, on Linux a chain of
    common viewers is tried until one succeeds, and on Darwin `open` is
    used, optionally with -a.
    """
    self.render()
    if not app and self.app:
        app = self.app
    system = platform.uname()[0]
    if os.name == 'nt':
        if app:
            self.msg('Overriding default image viewer not supported on Win32')
        self.call('start %s' % self.image.replace('/','\\'))
    elif system == 'Linux':
        if app:
            self.call('%s %s' % (app, self.image))
        else:
            # Fall through the usual viewers until one reports success.
            launched = self.call('xdg-open %s' % self.image)
            if not launched:
                launched = self.call('gthumb %s' % self.image)
            if not launched:
                self.call('display %s' % self.image)
    elif system == 'Darwin':
        if app:
            self.call('open %s -a %s' % (self.image, app))
        else:
            self.call('open %s' % self.image)
def user_agent():
    """Return the user agent string, e.g.
    "BleachBit/4.0 (Linux; fedora/11-Leonidas; en_US; GTK 2.24)"."""
    __platform = platform.system()  # Linux or Windows
    __os = platform.uname()[2]  # e.g., 2.6.28-12-generic or XP
    if sys.platform == "win32":
        # misleading: Python 2.5.4 shows uname()[2] as Vista on Windows 7
        __os = platform.uname()[3][
            0:3]  # 5.1 = Windows XP, 6.0 = Vista, 6.1 = 7
    elif sys.platform.startswith('linux'):
        # NOTE(review): platform.dist() was removed in Python 3.8 -- this
        # branch only works on older interpreters; verify the supported
        # Python range.
        dist = platform.dist()
        # example: ('fedora', '11', 'Leonidas')
        # example: ('', '', '') for Arch Linux
        if 0 < len(dist[0]):
            __os = dist[0] + '/' + dist[1] + '-' + dist[2]
    elif sys.platform[:6] == 'netbsd':
        __sys = platform.system()
        mach = platform.machine()
        rel = platform.release()
        __os = __sys + '/' + mach + ' ' + rel
    __locale = ""
    try:
        import locale
        __locale = locale.getdefaultlocale()[0]  # e.g., en_US
    except:
        # Locale detection is best effort; log and continue with "".
        logger.exception('Exception when getting default locale')
    try:
        import gtk
        gtkver = '; GTK %s' % '.'.join([str(x) for x in gtk.gtk_version])
    except:
        # GTK is optional; omit the component when unavailable.
        gtkver = ""
    agent = "BleachBit/%s (%s; %s; %s%s)" % (bleachbit.APP_VERSION,
                                             __platform, __os, __locale, gtkver)
    return agent
def sendError(self):
    """POST an anonymized crash report to the UV-CDAT usage server (Python 2).

    Hostname and username are SHA1-hashed before transmission; at most the
    last 5000 characters of the execution log are attached.
    (hashlib.sha1 on a str only works on Python 2.)
    """
    data = {}
    data['platform'] = platform.uname()[0]
    data['platform_version'] = platform.uname()[2]
    data['hashed_hostname'] = hashlib.sha1(platform.uname()[1]).hexdigest()
    data['hashed_username'] = hashlib.sha1(os.getlogin()).hexdigest()
    data['source'] = 'UV-CDAT'
    data['source_version'] = '1.2.1'
    data['description'] = self.getDescription()
    data['stack_trace'] = self.errorDetails.toPlainText()
    data['severity'] = 'FATAL'
    data['comments'] = self.userComments.toPlainText()
    if get_vistrails_configuration().output != '':
        fname = get_vistrails_configuration().output
        # read at most last 5000 chars from output log
        with open(fname, "r") as f:
            f.seek (0, 2)                     # Seek @ EOF
            fsize = f.tell()                  # Get Size
            f.seek (max (fsize-5000, 0), 0)   # Set pos @ last n chars
            data['execution_log'] = f.read()
    print urlencode(data)
    print "http://uvcdat.llnl.gov/UVCDATUsage/log/add/error/"
    result = urlopen("http://uvcdat.llnl.gov/UVCDATUsage/log/add/error/", urlencode(data))
def prepare(self):
    """Resolve the platform/arch, then create a StDownloader for the matching
    Syncthing daemon build, wire its progress events, and start the version
    query (which kicks off the download pipeline)."""
    # Determine which Syncthing to use
    suffix, tag = StDownloader.determine_platform()
    # Report error on unsupported platforms
    if suffix is None or tag is None:
        pd = "%s %s %s" % (
            platform.uname()[0], platform.uname()[2],  # OS, version
            platform.uname()[4])                       # architecture
        self.parent.error(self,
            _("Cannot download Syncthing daemon."),
            _("This platform (%s) is not supported") % (pd,),
            False)
        return
    # Determine target file & directory
    self.target = os.path.join(
        os.path.expanduser(StDownloader.get_target_folder()),
        "syncthing%s" % (suffix,)
    )
    # Create downloader and connect events
    self.sd = StDownloader(self.target, tag)
    self.sd.connect("error", self.on_download_error)
    self.sd.connect("version", self.on_version)
    self.sd.connect("download-progress", self.on_progress)
    self.sd.connect("download-finished", self.on_extract_start)
    self.sd.connect("extraction-progress", self.on_progress)
    self.sd.connect("extraction-finished", self.on_extract_finished)
    # Start downloading
    self.sd.get_version()
def test_user_agent_set(self):
    """Verify preauth() sends a 'user-agent' header whose value contains the
    product prefix, OS name/version and Python implementation/version."""
    config = self.config
    okta = OktaAPIAuth(**config)
    # Stub the HTTP pool so no network traffic happens.
    okta.pool = MagicMock()

    class Urlopen_Mock:
        data = '{}'
    okta.pool.urlopen.return_value = Urlopen_Mock()
    user_agent = 'user-agent'
    # http://www.ietf.org/rfc/rfc2616.txt
    # OktaOpenVPN/1.0.0 (Darwin 13.4.0) CPython/2.7.1
    okta.preauth()
    # Inspect the headers kwarg of the first urlopen() call.
    args = okta.pool.urlopen.call_args_list
    headers = args[0][1]['headers']
    self.assertIn(user_agent, headers)
    actual = headers[user_agent]
    import platform
    system = platform.uname()[0]
    system_version = platform.uname()[2]
    python_version = "{}/{}".format(
        platform.python_implementation(),
        platform.python_version(),
    )
    for expected in ['OktaOpenVPN/', system, system_version, python_version]:
        self.assertIn(expected, actual)
def connect_AXL():
    """Create and return a suds SOAP client bound to the CUCM AXL API.

    Picks a local WSDL file path by OS, reads credentials via
    getAuthentication("axl"), and disables SSL certificate verification
    (CUCM ships a self-signed certificate).
    """
    # First we connect to the AXL API
    # Logging for debugging
    logging.basicConfig(level=logging.INFO)
    #logging.getLogger('suds.client').setLevel(logging.DEBUG)
    logging.getLogger('suds.client').setLevel(logging.CRITICAL)
    #logging.getLogger('suds.transport').setLevel(logging.DEBUG)
    #logging.getLogger('suds.xsd.schema').setLevel(logging.CRITICAL)
    #logging.getLogger('suds.wsdl').setLevel(logging.CRITICAL)
    #"Connecting to myphone.central1.com to add the line and phones for the user"
    # Hard-coded lab CUCM AXL endpoint.
    location = 'https://172.20.133.71:8443/axl/'
    if platform.uname()[0] == 'Darwin':
        # OSX path
        wsdl = 'file:///Users/fbobes/Documents/Python/CUCM/AXLAPI.wsdl'
    elif platform.uname()[0] == 'Linux':
        # Linux path
        wsdl = 'file:///home/fbobes/cucm/AXLAPI.wsdl'
    else:
        # Some other OS: fall back to the Linux path.
        wsdl = 'file:///home/fbobes/cucm/AXLAPI.wsdl'
    auth = getAuthentication("axl")
    Username = auth[0]
    Password = auth[1]
    # Bypassing SSL self-cert check.
    # NOTE(review): this disables verification process-wide, for every HTTPS
    # client in the interpreter -- confirm that is acceptable.
    ssl._create_default_https_context = ssl._create_unverified_context
    # URL Detail
    client = Client(wsdl, location = location,
                    transport = HttpAuthenticated(username = Username, password = Password),
                    faults=False)
    return client
def start(self, session_path):
    """Launch the engine subprocess for *session_path* and bind the nanomsg
    PAIR socket used to talk to it, then start the reader thread."""
    self._session_path = session_path
    root = path.realpath(path.join(path.dirname(__file__), '../../..'))
    exe_path = path.join(root, 'bin/engine')
    if platform.uname().system == 'Windows':
        # Windows: the engine needs R's DLL directories on PATH to load.
        r_home = path.join(root, 'Frameworks', 'R')
        paths = [
            path.join(root, 'Resources', 'lib'),
            path.join(r_home, 'bin', 'x64'),
            path.join(r_home, 'library', 'RInside', 'lib', 'x64'),
        ]
        all_paths = ';'.join(paths)
    else:
        all_paths = ''
    env = os.environ
    # NOTE(review): this mutates os.environ['PATH'] for the whole process
    # (and sets it to '' off-Windows) rather than a copy -- confirm intended.
    env['PATH'] = all_paths
    con = '--con={}'.format(self._address)
    pth = '--path={}'.format(self._session_path)
    self._process = Popen([exe_path, con, pth], env=env)
    self._socket = nanomsg.Socket(nanomsg.PAIR)
    # Bounded receive timeout so the reader thread can notice shutdown.
    self._socket._set_recv_timeout(500)
    if platform.uname().system == 'Windows':
        # nanomsg on Windows expects a bytes address.
        self._socket.bind(self._address.encode('utf-8'))
    else:
        self._socket.bind(self._address)
    self._thread = threading.Thread(target=self._run)
    self._thread.start()
def generate(env):
    """SCons tool: set up Objective-C (.m) builders and flags on *env*.

    Registers static/shared object actions and emitters, selects the
    compiler (gcc-4.0 on Darwin release 10.0.0), and configures GNUstep
    flags and include paths on non-Darwin platforms.
    """
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
    StaticObjcAction = SCons.Action.Action("$OBJCCOM")
    SharedObjcAction = SCons.Action.Action("$OBJCSCOM")
    static_obj.add_action(".m", StaticObjcAction)
    # BUG FIX: the shared builder was registered with StaticObjcAction, so
    # SharedObjcAction ($OBJCSCOM) was defined but never used and shared
    # objects were built with the static command line.
    shared_obj.add_action(".m", SharedObjcAction)
    static_obj.add_emitter(".m", SCons.Defaults.StaticObjectEmitter)
    shared_obj.add_emitter(".m", SCons.Defaults.SharedObjectEmitter)
    if sys.platform == "darwin" and platform.uname()[2] == "10.0.0":
        env["OBJC"] = "gcc-4.0"
    else:
        env["OBJC"] = "gcc"
    # What a hack!
    if sys.platform == "darwin" and platform.uname()[2] == "10.0.0":
        env["CC"] = "gcc-4.0"
    env["OBJCFLAGS"] = SCons.Util.CLVar("")
    env["OBJCPPPATH"] = []
    env["OBJCCOM"] = SCons.Action.Action(objccom)
    env["OBJCSCOM"] = SCons.Action.Action(objcscom)
    env["_OBJCPPINCFLAGS"] = ("${_concat(INCPREFIX, OBJCPPPATH, INCSUFFIX, __env__, RDirs, TARGET, SOURCE)}",)
    env.Append(OBJCFLAGS="-fobjc-exceptions")
    if sys.platform != "darwin":
        # GNUstep needs the constant-string class spelled out.
        env.Append(OBJCFLAGS="-fconstant-string-class=NSConstantString")
    if sys.platform == "win32":
        env.Append(OBJCPPPATH=["/GNUstep/GNUstep/System/Library/Headers/", "/GNUstep/GNUstep/local/include"])
    else:
        env.Append(OBJCPPPATH=["/usr/include/GNUstep"])
def main():
    """Squish UI test: run the NCD model builder pipeline in DAWN and verify
    that a log file is produced.

    This test should only run on Linux x86_64; it returns immediately on any
    other platform. Use platform instead of os since os fails on windows.
    """
    import platform, sys
    if platform.uname()[0] != 'Linux' or platform.uname()[4] != 'x86_64':
        return
    startOrAttachToDAWN()
    # Pull the example data file into the test workspace.
    addExternalFile("results_b21-2672_detector_040713_102411.nxs", "suite_ncd",
                    "tst_ncd_model_builder", "data", "examples")
    openPerspective("NCD Model Builder Perspective")
    createAndChangeToSquishtestsTempDirectory()
    sasDirectoryPrefixPattern = "EDApplicationSASPipeline*"
    # Ensure stale logs cannot satisfy the final verification.
    deleteOldLogFiles(sasDirectoryPrefixPattern)
    clickButton(waitForObject(":Data parameters...._Button"))
    chooseFile(waitForObject(":SWT"),
               "/scratch/workspace/suite_ncd/tst_ncd_model_builder/workspace/data/examples/results_b21-2672_detector_040713_102411.nxs")
    # Fill the working directory, then copy/paste it into the HTML results field.
    mouseClick(waitForObject(":Data parameters.Working directory_Text"), 144, 5, 0, Button.Button1)
    type(waitForObject(":Data parameters.Working directory_Text"), "<Ctrl+a>")
    type(waitForObject(":Data parameters.Working directory_Text"), "/dls/tmp/squishtests")
    type(waitForObject(":Data parameters.Working directory_Text"), "<Ctrl+a>")
    type(waitForObject(":Data parameters.Working directory_Text"), "<Ctrl+c>")
    type(waitForObject(":Data parameters.HTML results directory_Text"), "<Ctrl+v>")
    clickButton(waitForObject(":Data parameters.Run NCD model building_Button"))
    # Give the pipeline time to finish before looking for its log.
    snooze(130)
    logfile = findLogFile(sasDirectoryPrefixPattern, 100)
    test.verify(logfile != None, "Existence of log file")
    closeOrDetachFromDAWN()
def run(self):
    """Build the native library for the host OS, then run the regular
    build_py step."""
    os_name = platform.uname()[0]
    # Native Windows and Cygwin both need the MinGW DLL target.
    if os_name == 'Windows' or 'CYGWIN' in os_name:
        make('mingw-dll')
    else:
        make('all')
    build_py.build_py.run(self)
def check_environ(product):
    """Ensure the OS hosts file maps every wpt test domain (Python 2: itervalues).

    Skipped for products ("firefox", "servo") that manage their own host
    resolution. Loads the server config, derives the expected host set,
    removes hosts already present in the hosts file, and raises WptrunError
    with platform-appropriate remediation instructions for any that remain.
    """
    if product not in ("firefox", "servo"):
        config = serve.load_config(os.path.join(wpt_root, "config.default.json"),
                                   os.path.join(wpt_root, "config.json"))
        config = serve.normalise_config(config, {})
        expected_hosts = (set(config["domains"].itervalues()) ^
                          set(config["not_domains"].itervalues()))
        missing_hosts = set(expected_hosts)
        if platform.uname()[0] != "Windows":
            hosts_path = "/etc/hosts"
        else:
            # Raw string: \W, \S, \d, \e, \h are invalid escapes otherwise.
            hosts_path = r"C:\Windows\System32\drivers\etc\hosts"
        with open(hosts_path, "r") as f:
            for line in f:
                # Strip comments, then discard every host alias on the line.
                line = line.split("#", 1)[0].strip()
                parts = line.split()
                hosts = parts[1:]
                for host in hosts:
                    missing_hosts.discard(host)
        if missing_hosts:
            # BUG FIX: the two instruction messages were swapped -- the
            # "Administrator privileges" text belongs to Windows and the
            # "sudo tee" text to POSIX.
            if platform.uname()[0] != "Windows":
                message = """Missing hosts file configuration. Run ./wpt make-hosts-file | sudo tee -a %s""" % hosts_path
            else:
                message = """Missing hosts file configuration. Run python wpt make-hosts-file >> %s from a shell with Administrator privileges.""" % hosts_path
            raise WptrunError(message)
def __init__(self, le2mserv):
    """Build the LE2M server main window: menus, tabs, lab pictures, waiting
    icons, the server info line, and the connected-remotes table."""
    super(GuiServeur, self).__init__()
    self._le2mserv = le2mserv
    self._questcomp = None
    self.ui = servguimain.Ui_EcranServeur()
    self.ui.setupUi(self)
    self._create_menus()
    self.ui.label_le2m.setText(le2mtrans(u"LE2M\nExperimental Economics Software of Montpellier"))
    # tabs
    self.ui.onglets.setTabText(self.ui.onglets.indexOf(self.ui.tabInfos),
                               le2mtrans(u"Informations"))
    self.ui.onglets.setTabText(self.ui.onglets.indexOf(self.ui.tabClients),
                               le2mtrans(u"Remotes"))
    self.ui.onglets.setTabText(self.ui.onglets.indexOf(self.ui.tabExperience),
                               le2mtrans(u"Experiment"))
    # picture on the first tab ---------------------------------------------
    # Missing image files are tolerated: a placeholder text is shown instead.
    try:
        img_labo_pix = QtGui.QPixmap(params.getp("LABLOGO"))
        self.ui.label_logo_laboratoire.setPixmap(img_labo_pix)
    except IOError:
        logger.warning(u"Error while loading LABLOGO picture")
        self.ui.label_logo_laboratoire.setText(le2mtrans(u"Here the logo of the lab"))
    try:
        img_leem_pix = QtGui.QPixmap(params.getp("LABPICTURE"))
        self.ui.label_image_centre.setPixmap(img_leem_pix)
    except IOError:
        logger.warning(u"Error while loading LABPICTURE picture")
        self.ui.label_image_centre.setText(le2mtrans(u"Here the main picture"))
    # icons for the waiting mode -------------------------------------------
    self._icon_rouge = QtGui.QIcon(os.path.join(params.getp("IMGDIR"), "red.png"))
    self._icon_vert = QtGui.QIcon(os.path.join(params.getp("IMGDIR"), "green.png"))
    self.ui.label_attente.setText(le2mtrans(u"Decisions"))
    self._players_wait_mode = list()
    # server infos ---------------------------------------------------------
    self.ui.label_infos_serveur.setText(
        u"OS: {} {} | Python version: {} | Hostname: {} | IP: {}".format(
            platform.uname()[0], platform.uname()[2],
            sys.version.split()[0], self._le2mserv.hostname,
            self._le2mserv.ip,
        )
    )
    # table on the second tab with connected remotes -----------------------
    # handle automatic and simulation modes as well as remotes' deconnection
    self.ui.label_connectedremotes.setText(le2mtrans(u"Connected remotes: 0"))
    self.tableJoueurs = TableModelJoueurs()
    self.ui.tv_clients_connectes.setModel(self.tableJoueurs)
    self.ui.tv_clients_connectes.horizontalHeader().setResizeMode(QtGui.QHeaderView.Stretch)
    self.ui.tv_clients_connectes.horizontalHeader().setClickable(True)
    # Clicking a column header toggles the sort/selection via the model.
    self.ui.tv_clients_connectes.horizontalHeader().sectionClicked[int].connect(self.tableJoueurs.inverse)
    self.ui.onglets.setCurrentIndex(0)
    self.setWindowTitle(le2mtrans(u"LE2M"))
def arch():
    """Classify the host architecture as 'arm', 'x64' or 'x86'.

    ARM is only distinguished on Linux; elsewhere the pointer width decides.
    """
    machine = uname()[4].lower()
    is_linux = sys.platform.lower().startswith('linux')
    if is_linux and (machine.startswith('arm') or machine.startswith('aarch')):
        return 'arm'
    if sys.maxsize > 2**32:
        return 'x64'
    return 'x86'
def linuxType(self):
    """Classify the running Linux distribution (Python 2).

    Returns self.centos for CentOS, self.ubu when matched, else "ok".
    platform.dist() was removed in Python 3.8, so this is py2-only.
    """
    print platform.uname()
    if platform.dist()[0] == "centos":
        return self.centos
    elif platform.uname()[1] == self.ubu:
        # NOTE(review): uname()[1] is the *hostname*, not the distro name --
        # this only matches when the machine is literally named self.ubu;
        # confirm that is intended.
        return self.ubu
    else:
        return "ok"
def Epel_install(): plform = platform.linux_distribution() fusion = plform[1] rhat = fusion[0] ########################### #Redhat6 32/64 bit version# ########################### if rhat == "6": print "RedHat6" print "Now installing Epel for RedHat6" chuck = platform.uname() arch = chuck[5] if arch == "x86_64": os.system(R6_64) os.system(R6PKG) os.system('rm *.rpm') elif arch == "i386": os.system(R6_32) os.system(R6PKG) os.system('rm *.rpm') ########################### #Redhat5 32/64 bit version# ########################### elif rhat == "5": print "RedHat 5" print "Now installing Epel for RedHat5" chuck = platform.uname() arch = chuck[5] if arch == "x86_64": os.system(R5_64) os.system(R5PKG) os.system('rm *.rpm') elif arch == "i386": os.system(R5_32) os.system(R5PKG) os.system('rm *.rpm') ########################### #Redhat4 32/64 bit version# ########################### elif rhat == "4": print "RedHat 4" print "Now installing Epel for RedHat4" chuck = platform.uname() arch = chuck[5] if arch == "x86_64": os.system(R4_64) os.system(R4PKG) os.system('rm *.rpm') elif arch == "i386": os.system(R4_32) os.system(R4PKG) os.system('rm *.rpm') else: print "This system is running either windows or another flavor of linux"
def system_info(name, version):
    """Assemble a dict describing the app plus the host environment
    (Python implementation/version, OS name/release, pointer width)."""
    uname = platform.uname()
    word_size = 64 if sys.maxsize > 2**32 else 32
    return {
        'app_name': name,
        'app_version': version,
        'sputnik_version': __version__,
        'py': platform.python_implementation(),
        'py_version': platform.python_version(),
        'os': uname[0],
        'os_version': uname[2],
        'bits': word_size,
    }
def main():
    """Squish UI test: run the MX Dimple pipeline in DAWN and check its log
    file for the success marker. Only runs on Linux x86_64 (returns
    immediately elsewhere)."""
    # This test should only run on Linux x86_64
    import platform, sys
    if platform.uname()[0] != 'Linux' or platform.uname()[4] != 'x86_64':
        return
    # Start Dawn, open correct perspective, set a reasonable port
    startOrAttachToDAWN()
    openPerspective("MX Pipelines")
    setPort(8081)
    # Create, set and change to the working directory
    wdir = "/dls/tmp/squishtests"
    try:
        os.makedirs(wdir, 0777)
    except:
        # Directory may already exist; best effort.
        pass
    setDirectories(wdir)
    os.chdir(wdir)
    # Delete old log files so stale logs cannot satisfy the verification
    deleteOldLogFiles("EDApplicationDimplev0*")
    # Go to Dimple tab
    clickTab(waitForObject(":Dimple_CTabItem"), 15, 9, 0, Button.Button1)
    copyToClipboard("/dls/sci-scratch/ExampleData/MXPipelines/dls.thaumatin.pdb")
    # ... and paste into PDB field
    mouseClick(waitForObject(":Dimple Input Parameters.PDB file_Text_2"), 135, 11, 0, Button.Button1)
    type(waitForObject(":Dimple Input Parameters.PDB file_Text_2"), "<Ctrl+v>")
    copyToClipboard("/dls/sci-scratch/ExampleData/MXPipelines/fast_dp.mtz")
    # ... and paste into MTZ field
    mouseClick(waitForObject(":Dimple Input Parameters.MTZ file_Text_2"), 92, 13, 0, Button.Button1)
    type(waitForObject(":Dimple Input Parameters.MTZ file_Text_2"), "<Ctrl+v>")
    clickButton(waitForObject(":Dimple Input Parameters.Get MTZ column headers_Button_2"))
    # Click button to run pipeline (poll until it becomes enabled)
    widget = findObject(":Run pipeline.Run pipeline with given parameters_Button_2")
    while not widget.enabled:
        snooze(1.0)
    clickButton(widget)
    snooze(15.0)
    # Find the log file produced when running the pipeline
    logfile = findLogFile("EDApplicationDimplev0*", 100)
    test.verify(logfile != None, "Existence of log file")
    # Verify that the pipeline succeeded by checking the log file for certain key phrases
    checkLogFile(wdir + '/' + logfile, 'Dimplev0 successful', 500)
    # The End
    closeOrDetachFromDAWN()
def get_cpuinfo():
    """Return a human-readable CPU description.

    Prefers the processor field of platform.uname(); falls back to the
    "model name" entry of /proc/cpuinfo; returns "unknown" otherwise.
    """
    if platform.uname()[5]:
        return platform.uname()[5]
    if os.path.exists("/proc/cpuinfo"):
        # BUG FIX: mode "rU" was removed in Python 3.11 (raises ValueError);
        # plain "r" already performs universal-newline translation on Py3.
        with open("/proc/cpuinfo", "r") as f:
            for line in f:
                if line.startswith("model name"):
                    return line.split(": ")[1].replace("\n", "").replace("\r", "")
    return "unknown"
def resolveBaeDir():
    """Return the per-OS base directory for data files.

    Matches a substring of the OS name; unknown platforms (and Darwin)
    yield the empty string. Specify your baseDir here.
    """
    system_name = platform.uname()[0]
    base_dirs = (
        ('Darwin', ""),
        ('Linux', "/home/ilps/Downloads/COMP432/h2/"),
        ('Windows', "C:\\Users\\Yu\\Downloads\\"),
    )
    for key, directory in base_dirs:
        if key in system_name:
            return directory
    return ""
def user_agent(self):
    """Build a space-separated "product/version" user-agent string from the
    library, the app, the Python implementation, the OS and the word size.
    Components with a falsy product name are dropped."""
    uname = platform.uname()
    components = [
        ('Sputnik', __version__),
        (self.name, self.version),
        (platform.python_implementation(), platform.python_version()),
        (platform.uname()[0], uname[2]),
        ('64bits', sys.maxsize > 2**32),
    ]
    return ' '.join('%s/%s' % (product, ver)
                    for product, ver in components if product)
def __process_blocks(self):
    """Determine the device block size (non-Windows) and compute the block
    layout; returns 1 on OSError (Python 2: `except OSError, e`)."""
    if uname()[0] != 'Windows':
        self.block_size = self.__get_block_size()
    elif uname()[0] == 'Windows':
        # NOTE(review): only prints the 4096 default -- self.block_size is
        # never assigned on this branch; confirm __calc_blocks() tolerates
        # that or that a default is set elsewhere.
        print 'detected Windows OS: setting block size to defalt 4096'
    try:
        blocks, lastblock = self.__calc_blocks()
    except OSError, e:
        print e
        return 1
def test_uname_slices(self):
    """platform.uname() must slice exactly like the plain tuple of its fields."""
    uname_result = platform.uname()
    as_tuple = tuple(uname_result)
    self.assertEqual(uname_result[:], as_tuple)
    self.assertEqual(uname_result[:5], as_tuple[:5])
# Hannah Sonsalla, June 2017, for CSinParallel from __future__ import print_function import sys, os, platform # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.imgmath'] pngmath_dvipng_args = ['gamma', '1.5', '-D', '150', '-bg', 'Transparent'] if 'Darwin' in platform.uname()[0]: pngmath_latex = '/usr/local/texlive/2011/bin/x86_64-darwin/latex' pngmath_dvipng = '/usr/local/texlive/2011/bin/x86_64-darwin/dvipng' elif 'Linux' in platform.uname()[0]: pngmath_latex = '/usr/bin/latex' pngmath_dvipng = '/usr/bin/dvipng' elif 'Windows' in platform.uname()[0]: pngmath_latex = '' pngmath_dvipng = '' # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst'
def checkDependency(self):
    '''Checks if files in config have changed, the command line options have changed or the PATH has changed'''
    ''' By default - checks if configure needs to be run'''
    ''' If --arch-hash it manages the same information but it:'''
    ''' * computes a short hash for the configuration <hashvalue>'''
    ''' * sets self.arch and PETSC_ARCH to arch-<hashvalue>'''
    ''' This results in the downloaded packages being installed once to the arch-<hasvalue> directory'''
    ''' and a new directory with a different hash is created if the configuration changes.'''
    ''' This mode is intended mostly for testing to reduce reconfigure and recompile times (not currently used)'''
    ''' If --package-prefix-hash=directory is provided'''
    ''' * computes a short hash for the configuration <hashvalue>'''
    ''' * puts the downloaded external packages into location directory/hash'''
    ''' This results in the downloaded packages being installed once'''
    ''' and a new directory with a different hash is created if the configuration changes.'''
    ''' This mode is intended mostly for testing to reduce time of reinstalling external packages'''
    import os
    import sys
    import hashlib
    import platform
    # The hash text starts with the OS/machine, then PATH, then the sorted
    # command-line args (ignoring PETSC_ARCH settings and --force).
    if sys.version_info < (3, ):
        hash = 'Uname: ' + platform.uname()[0] + ' ' + platform.uname(
        )[4] + '\n'
    else:
        hash = 'Uname: ' + platform.uname().system + ' ' + platform.uname(
        ).processor + '\n'
    hash += 'PATH=' + os.environ.get('PATH', '') + '\n'
    args = sorted(
        set(
            filter(
                lambda x: not (x.startswith('PETSC_ARCH') or x == '--force'
                               ), sys.argv[1:])))
    hash += 'args:\n' + '\n'.join(' ' + a for a in args) + '\n'
    # Append a per-file sha256 of every config/*.py file (sorted so the
    # result is independent of os.walk ordering).
    chash = ''
    try:
        for root, dirs, files in os.walk('config'):
            if root == 'config':
                dirs.remove('examples')
            for f in files:
                if not f.endswith('.py') or f.startswith(
                        '.') or f.startswith('#'):
                    continue
                fname = os.path.join(root, f)
                with open(fname, 'rb') as f:
                    chash += hashlib.sha256(
                        f.read()).hexdigest() + ' ' + fname + '\n'
    except:
        self.logPrint(
            'Error generating file list/hash from config directory for configure hash, forcing new configuration'
        )
        return
    hash += '\n'.join(sorted(chash.splitlines()))
    hashfilepackages = None
    # Generate short hash to use for the arch so the same arch can be reused if the configuration files don't change
    if 'arch-hash' in self.argDB:
        # --arch-hash is mutually exclusive with explicit arch/prefix options.
        if self.argDB['prefix']:
            raise RuntimeError('Cannot provide --prefix and --arch-hash')
        if hasattr(self.argDB, 'PETSC_ARCH'):
            raise RuntimeError('Cannot provide PETSC_ARCH and --arch-hash')
        if 'package-prefix-hash' in self.argDB:
            raise RuntimeError(
                'Cannot provide --arch-hash and --package-prefix-hash')
        if os.getenv('PETSC_ARCH'):
            raise RuntimeError(
                'Do not set the environmental variable PETSC_ARCH and use --arch-hash'
            )
    if 'arch-hash' in self.argDB or 'package-prefix-hash' in self.argDB:
        import hashlib
        # md5 of the full hash text gives the short directory-name prefix.
        m = hashlib.md5()
        m.update(hash.encode('utf-8'))
        hprefix = m.hexdigest()
        if 'arch-hash' in self.argDB:
            self.argDB['PETSC_ARCH'] = 'arch-' + hprefix[0:6]
            self.arch = 'arch-' + hprefix[0:6]
        else:
            if not os.path.isdir(self.argDB['package-prefix-hash']):
                self.logPrintBox(
                    'Specified package-prefix-hash location %s not found! Attemping to create this dir!'
                    % self.argDB['package-prefix-hash'])
                try:
                    os.makedirs(self.argDB['package-prefix-hash'])
                except Exception as e:
                    self.logPrint(
                        'Error creating package-prefix-hash directory ' +
                        self.argDB['package-prefix-hash'] + ': ' + str(e))
                    raise RuntimeError(
                        'You must have write permission to create this directory!'
                    )
            # Try progressively longer hash prefixes until we either find a
            # completed build to reuse or can create a fresh directory.
            status = False
            for idx in range(6, len(hprefix)):
                hashdirpackages = os.path.join(
                    self.argDB['package-prefix-hash'], hprefix[0:idx])
                hashfilepackages = os.path.join(hashdirpackages,
                                                'configure-hash')
                if os.path.isdir(hashdirpackages):
                    if os.path.exists(hashfilepackages):
                        self.argDB[
                            'package-prefix-hash'] = 'reuse'  # indicates prefix libraries already built, no need to rebuild
                        status = True
                        break
                    else:
                        continue  # perhaps an incomplete build? use a longer hash
                else:
                    try:
                        os.mkdir(hashdirpackages)
                    except Exception as e:
                        self.logPrint(
                            'Error creating package-prefix-hash directory ' +
                            hashdirpackages + ': ' + str(e))
                        raise RuntimeError(
                            'You must have write permission on --package-prefix-hash='
                            + self.argDB['package-prefix-hash'] + ' directory')
                    status = True
                    break
            if not status:
                raise RuntimeError(
                    'Unable to create package-prefix-hash dir! Suggest cleaning up %s* !'
                    % os.path.join(self.argDB['package-prefix-hash'],
                                   hprefix[0:6]))
            self.argDB['prefix'] = hashdirpackages
    hashfile = os.path.join(self.arch, 'lib', 'petsc', 'conf',
                            'configure-hash')
    if self.argDB['force']:
        self.logPrint('Forcing a new configuration requested by use')
        self.makeDependency(hash, hashfile, hashfilepackages)
        return
    a = ''
    try:
        with open(hashfile, 'r') as f:
            a = f.read()
    except:
        # First run (or unreadable hash file): record and configure.
        self.logPrint('No previous hashfile found')
        self.makeDependency(hash, hashfile, hashfilepackages)
        return
    if a == hash:
        # Nothing changed: refresh petscvariables (best effort) and exit
        # without re-running configure.
        try:
            self.logPrint(
                'Attempting to save lib/petsc/conf/petscvariables file')
            with open(
                    os.path.join('lib', 'petsc', 'conf', 'petscvariables'),
                    'w') as g:
                g.write('PETSC_ARCH=' + self.arch + '\n')
                g.write('PETSC_DIR=' + self.petscdir.dir + '\n')
                g.write(
                    'include $(PETSC_DIR)/$(PETSC_ARCH)/lib/petsc/conf/petscvariables\n'
                )
            self.logPrint('Saved lib/petsc/conf/petscvariables file')
        except:
            self.logPrint(
                'Unable to save lib/petsc/conf/petscvariables file')
        self.logPrint('configure hash file: ' + hashfile +
                      ' matches; no need to run configure.')
        print(
            'Your configure options and state has not changed; no need to run configure'
        )
        print(
            'However you can force a configure run using the option: --force'
        )
        sys.exit()
    self.logPrint('configure hash file: ' + hashfile + ' does not match\n' +
                  a + '\n---\n' + hash + '\n need to run configure')
    self.makeDependency(hash, hashfile, hashfilepackages)
def get_machineinfo():
    """Return the machine hardware identifier (e.g. 'x86_64').

    Returns:
        str: the fifth field of ``platform.uname()`` (the machine type),
        or the string "unknown" when the platform reports an empty value.
    """
    # Call uname() once instead of twice; the original queried the
    # platform a second time just to return the same value.
    machine = platform.uname()[4]
    return machine if machine else "unknown"
' --list-tests print out all available tests. This option will not run any tests' ) print( ' if both list-tags and list-tests are specified each test will be printed along' ) print(' with what tags it has') print( ' --no-exceptions do not load the LibCI/exceptions.specs file') print(' --context The context to use for test configuration') sys.exit(2) # get os and directories for future use # NOTE: WSL will read as 'Linux' but the build is Windows-based! system = platform.system() if system == 'Linux' and "microsoft" not in platform.uname()[3].lower(): linux = True else: linux = False # Parse command-line: try: opts, args = getopt.getopt(sys.argv[1:], 'hvqr:st:', longopts=[ 'help', 'verbose', 'debug', 'quiet', 'regex=', 'stdout', 'tag=', 'list-tags', 'list-tests', 'no-exceptions', 'context=' ]) except getopt.GetoptError as err: log.e(err) # something like "option -a not recognized"
def get_pretty_machine_info():
    """Return a one-line, human-readable summary of the host platform.

    Combines the OS name, OS release and hardware identifier from
    ``platform.uname()`` into ``"Running on <system> <release> <machine>"``.
    """
    system, _, release, _, machine, _ = platform.uname()
    return "Running on " + system + " " + release + " " + machine
return up_time # Global Variables COUNT_MSG = 0 USERS = {} COUNT_PM = {} LASTMSG = {} CMD_HELP = {} ZALG_LIST = {} ISAFK = False AFKREASON = None DELMSG = False ##Constants DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else uname().node repo = Repo() modules = CMD_HELP uptime = time.strftime('%X') ## output = ("` =============================== `\n" f"`Fizilion is Up and Running.... `\n" f"`=============================== `\n" f"•`Telethon : v{version.__version__} `\n" f"•`Python : v{python_version()} `\n" f"•`User : {DEFAULTUSER} `\n" f"•`Running on : {repo.active_branch.name} `\n" f"•`Loaded modules : 105 `\n" f"•`Fizilion : {USERBOT_VERSION} `\n" f"•`Bot started at : {uptime} `\n")
if hasattr(e, 'reason'): log.info(' . reason: %s', e.reason) if hasattr(e, 'code'): log.info(' . code %s: ', e.code) if hasattr(e, 'read'): log.log(utils_log.TRACE_LEVEL, ' . detail:\n%s', e.read()) log.debug('-' * 60) log.debug("Stack trace exception is detailed herafter:") exc_type, exc_value, exc_tb = sys.exc_info() x = traceback.format_exception(exc_type, exc_value, exc_tb) for stack in x: log.debug(' . %s', stack.replace('\n', '')) log.debug('-' * 60) log.log(utils_log.TRACE_LEVEL, 'System info is provided hereafter:') system, node, release, version, machine, processor = platform.uname() log.log(utils_log.TRACE_LEVEL, ' . system : %s', system) log.log(utils_log.TRACE_LEVEL, ' . node : %s', node) log.log(utils_log.TRACE_LEVEL, ' . release : %s', release) log.log(utils_log.TRACE_LEVEL, ' . version : %s', version) log.log(utils_log.TRACE_LEVEL, ' . machine : %s', machine) log.log(utils_log.TRACE_LEVEL, ' . processor: %s', processor) log.log(utils_log.TRACE_LEVEL, ' . python : %s', sys.version) log.log(utils_log.TRACE_LEVEL, ' . client : %s', get_client_version()) log.log(utils_log.TRACE_LEVEL, '-' * 60) sys.exit(ERROR_CODE_EXIT) finally: log.debug("Elapsed time : %s",
async def amireallyalivereset(ureset):
    # Reset the display name used by the .alive command back to its default:
    # the configured ALIVE_NAME when set, otherwise this machine's hostname
    # (uname().node).
    # NOTE(review): `uname` is imported elsewhere in this module --
    # presumably platform.uname or os.uname; confirm which.
    global DEFAULTUSER
    DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else uname().node
    # Acknowledge the reset by editing the triggering message in place.
    await ureset.edit("`" "Successfully reset user for alive!" "`")
if os.environ["ELK_SSL"]=="true": host_params = {'host':os.environ["ELK_URL"], 'port':int(os.environ["ELK_PORT"]), 'use_ssl':True} es = ES([host_params], connection_class=RC, http_auth=(os.environ["ELK_LOGIN"], os.environ["ELK_PASSWORD"]), use_ssl=True ,verify_certs=False) else: host_params="http://"+os.environ["ELK_URL"]+":"+os.environ["ELK_PORT"] es = ES(hosts=[host_params]) logger.info("AMQC_URL :"+os.environ["AMQC_URL"]) SECONDSBETWEENCHECKS=3600 nextload=datetime.now() while True: time.sleep(5) try: variables={"platform":"_/_".join(platform.uname()),"icon":"list-alt"} conn.send_life_sign(variables=variables) if (datetime.now() > nextload): try: start = datetime.now() start = start.replace(hour=0,minute=0,second=0, microsecond=0) nextload=datetime.now()+timedelta(seconds=SECONDSBETWEENCHECKS) doTheWork(start) except Exception as e2: logger.error("Unable to load sites data.") logger.error(e2,exc_info=True)
# Kodi torrent addon module header (Python 2: uses cPickle).
import platform
import cPickle as pickle
from math import ceil
from traceback import format_exc
from contextlib import closing

from requests import get

import xbmc
import xbmcvfs

from addon import Addon
from utilities import get_duration, HachoirError

kodi_monitor = xbmc.Monitor()
addon = Addon()

# This is for potential statistic and debugging purposes
# NOTE(review): relies on `sys` (and later `os`) being imported earlier in
# the file -- not visible in this chunk.
addon.log_notice('sys.platform: "{0}". platform.uname: "{1}"'.format(
    sys.platform, str(platform.uname())))

try:
    import libtorrent  # Try to import global module
except ImportError:
    # Fall back to the libtorrent build bundled with the addon.
    sys.path.append(os.path.join(addon.path, 'site-packages'))
    from python_libtorrent import get_libtorrent
    libtorrent = get_libtorrent()
addon.log_debug('libtorrent version: {0}'.format(libtorrent.version))


class TorrenterError(Exception):
    """Custom exception"""
    pass
and comprise the entire 6 s of recording. The idea here is to open the files individually, extract the data, save it to a dataframe and compile all of the files of the same genotype into a dataframe. Then take the mean. Then plot the means vs. all traces for both OMP and Gg8. """ import pandas as pd import numpy as np import matplotlib.pyplot as plt import os import platform ''' ################## Define file structure on server #################### ''' # home_dir will depend on the OS, but the rest will not # query machine identity and set home_dir from there machine = platform.uname()[0] if machine == 'Darwin': home_dir = '/Volumes/Urban' elif machine == 'Linux': home_dir = '/run/user/1000/gvfs/smb-share:server=130.49.237.41,share=urban' elif machine == 'Windows': home_dir = os.path.join('N:', os.sep, 'urban') else: print("OS not recognized. \nPlease see Nate for correction.") project_dir = os.path.join(home_dir, 'Huang', 'OSN_OMPvGg8_MTC') figure_dir = os.path.join(project_dir, 'figures')
def _check_for_openmp():
    """Check whether the default compiler supports OpenMP.

    Compiles a tiny OpenMP test program with ``<CC> -fopenmp`` inside a
    temporary directory and returns True when the compile succeeds.  On
    failure it prints advisory messages (a multi-core warning, plus a gcc
    hint on macOS) and returns False.

    This routine is adapted from pynbody // yt.
    Thanks to Nathan Goldbaum and Andrew Pontzen.
    """
    import distutils.sysconfig
    import subprocess

    tmpdir = tempfile.mkdtemp(prefix='pyrocko')
    # Prefer the CC environment variable; fall back to the compiler Python
    # itself was built with.
    # NOTE(review): distutils.sysconfig.get_config_var('CC') can be None on
    # some platforms, in which case .split() raises AttributeError -- confirm.
    compiler = os.environ.get(
        'CC', distutils.sysconfig.get_config_var('CC')).split()[0]

    # Attempt to compile a test script.
    # See http://openmp.org/wp/openmp-compilers/
    tmpfile = op.join(tmpdir, 'check_openmp.c')
    with open(tmpfile, 'w') as f:
        f.write('''
#include <omp.h>
#include <stdio.h>
int main() {
#pragma omp parallel
printf("Hello from thread %d", omp_get_thread_num());
}
''')

    try:
        # Discard compiler output; only the exit status matters here.
        with open(os.devnull, 'w') as fnull:
            exit_code = subprocess.call([
                compiler, '-fopenmp',
                '-o%s' % op.join(tmpdir, 'check_openmp'), tmpfile
            ], stdout=fnull, stderr=fnull)
    except OSError:
        # Compiler binary not found / not executable.
        exit_code = 1
    finally:
        # Always clean up the scratch directory.
        shutil.rmtree(tmpdir)

    if exit_code == 0:
        print('Continuing your build using OpenMP...')
        return True

    import multiprocessing
    import platform
    # Only warn when parallelism would actually have helped.
    if multiprocessing.cpu_count() > 1:
        print('''WARNING
OpenMP support is not available in your default C compiler, even though
your machine has more than one core available.
Some routines in pyrocko are parallelized using OpenMP and these will
only run on one core with your current configuration.
''')
        if platform.uname()[0] == 'Darwin':
            print('''Since you are running on Mac OS, it's likely that the problem here
is Apple's Clang, which does not support OpenMP at all. The easiest
way to get around this is to download the latest version of gcc from
here: http://hpc.sourceforge.net. After downloading, just point the
CC environment variable to the real gcc and OpenMP support should
get enabled automatically. Something like this -
sudo tar -xzf /path/to/download.tar.gz /
export CC='/usr/local/bin/gcc'
python setup.py clean
python setup.py build
''')
    print('Continuing your build without OpenMP...')
    return False
version="0.1.7", description= "Most Powerful cross-platform Video Processing Python framework powerpacked with unique trailblazing features.", license="Apache License 2.0", author="Abhishek Thakur", install_requires=[ "pafy", "mss", "numpy", "youtube-dl", "requests", "pyzmq", "colorlog", "tqdm", ] + (["opencv-python"] if test_opencv() else []) + (["picamera"] if ("arm" in platform.uname()[4][:3]) else []), long_description=long_description, long_description_content_type="text/markdown", author_email="*****@*****.**", url="https://github.com/abhiTronix/vidgear", extras_require={ "asyncio": [ "starlette", "aiofiles", "jinja2", "aiohttp", "uvicorn", "msgpack_numpy", ] + (["uvloop"] if (platform.system() != "Windows") else []) }, keywords=[
def test_uname_copy(self):
    """Shallow and deep copies of platform.uname() compare equal to it."""
    original = platform.uname()
    for clone in (copy.copy(original), copy.deepcopy(original)):
        self.assertEqual(clone, original)
import platform

# Gather a broad snapshot of interpreter and OS facts for inspection.
# NOTE(review): Python 2 script -- it uses `print item` statements, and
# platform.dist() was deprecated and removed in Python 3.8.
profile = [
    platform.architecture(),
    platform.dist(),
    platform.libc_ver(),
    platform.mac_ver(),
    platform.machine(),
    platform.node(),
    platform.platform(),
    platform.processor(),
    platform.python_build(),
    platform.python_compiler(),
    platform.python_version(),
    platform.system(),
    platform.uname(),
    platform.version(),
]
# Dump each collected fact on its own line (Python 2 print statement).
for item in profile:
    print item

# Script Name : logs.py
# Author : Craig Richards
# Created : 13th October 2011
# Last Modified :
# Version : 1.1
# Modifications : 1.1 - Added the variable zip_program so you can set it for the zip program on whichever OS, so to run on a different OS just change the locations of these two variables.
# Description : This script will search for all *.log files in the given directory, zip them using the program you specify and then date stamp them

import os  # Load the Library Module
from time import strftime  # Load just the strftime Module
encoding = locale.getdefaultlocale()[1] or 'ascii' codecs.lookup(encoding) except Exception: encoding = 'ascii' return encoding DEFAULT_LOCALE_ENCODING = get_system_encoding() mswindows = (sys.platform == "win32") # learning from 'subprocess' module linux = (sys.platform == "linux2") hidden_hostname = True if mswindows: uname = list(platform.uname()) if hidden_hostname: uname[1] = "hidden_hostname" print uname import _winreg try: reg_key = _winreg.OpenKey( _winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion") if reg_key: ProductName = _winreg.QueryValueEx(reg_key, "ProductName")[0] or None EditionId = _winreg.QueryValueEx(reg_key, "EditionId")[0] or None ReleaseId = _winreg.QueryValueEx(reg_key, "ReleaseId")[0] or None
def __init__(self): super(Win32Helper, self).__init__() self.os_system = platform.system() self.os_release = platform.release() self.os_version = platform.version() self.os_machine = platform.machine() self.os_uname = platform.uname() self.name = "Win32Helper" if "windows" == self.os_system.lower(): win_ver = "win7_" + self.os_machine.lower() if ("5" == self.os_release): win_ver = "winxp" if logger().DEBUG: logger().log("[helper] OS: {} {} {}".format( self.os_system, self.os_release, self.os_version)) self.use_existing_service = False self.win_ver = win_ver self.driver_handle = None self.device_file = str(DEVICE_FILE) c_int_p = POINTER(c_int) # enable required SeSystemEnvironmentPrivilege privilege privilege = win32security.LookupPrivilegeValue( None, 'SeSystemEnvironmentPrivilege') token = win32security.OpenProcessToken( win32process.GetCurrentProcess(), win32security.TOKEN_READ | win32security.TOKEN_ADJUST_PRIVILEGES) win32security.AdjustTokenPrivileges( token, False, [(privilege, win32security.SE_PRIVILEGE_ENABLED)]) win32api.CloseHandle(token) # import firmware variable API try: self.GetFirmwareEnvironmentVariable = kernel32.GetFirmwareEnvironmentVariableW self.GetFirmwareEnvironmentVariable.restype = c_int self.GetFirmwareEnvironmentVariable.argtypes = [ c_wchar_p, c_wchar_p, c_void_p, c_int ] self.SetFirmwareEnvironmentVariable = kernel32.SetFirmwareEnvironmentVariableW self.SetFirmwareEnvironmentVariable.restype = c_int self.SetFirmwareEnvironmentVariable.argtypes = [ c_wchar_p, c_wchar_p, c_void_p, c_int ] except AttributeError as msg: logger().warn( "G[S]etFirmwareEnvironmentVariableW function doesn't seem to exist" ) pass try: self.NtEnumerateSystemEnvironmentValuesEx = windll.ntdll.NtEnumerateSystemEnvironmentValuesEx self.NtEnumerateSystemEnvironmentValuesEx.restype = c_int self.NtEnumerateSystemEnvironmentValuesEx.argtypes = [ c_int, c_void_p, c_void_p ] except AttributeError as msg: logger().warn( "NtEnumerateSystemEnvironmentValuesEx function doesn't 
seem to exist" ) pass try: self.GetFirmwareEnvironmentVariableEx = kernel32.GetFirmwareEnvironmentVariableExW self.GetFirmwareEnvironmentVariableEx.restype = c_int self.GetFirmwareEnvironmentVariableEx.argtypes = [ c_wchar_p, c_wchar_p, c_void_p, c_int, c_int_p ] self.SetFirmwareEnvironmentVariableEx = kernel32.SetFirmwareEnvironmentVariableExW self.SetFirmwareEnvironmentVariableEx.restype = c_int self.SetFirmwareEnvironmentVariableEx.argtypes = [ c_wchar_p, c_wchar_p, c_void_p, c_int, c_int ] except AttributeError as msg: if logger().DEBUG: logger().warn( "G[S]etFirmwareEnvironmentVariableExW function doesn't seem to exist" ) pass try: self.GetSystemFirmwareTbl = kernel32.GetSystemFirmwareTable self.GetSystemFirmwareTbl.restype = c_int self.GetSystemFirmwareTbl.argtypes = [ c_int, c_int, c_void_p, c_int ] except AttributeError as msg: logger().warn( "GetSystemFirmwareTable function doesn't seem to exist") pass try: self.EnumSystemFirmwareTbls = kernel32.EnumSystemFirmwareTables self.EnumSystemFirmwareTbls.restype = c_int self.EnumSystemFirmwareTbls.argtypes = [c_int, c_void_p, c_int] except AttributeError as msg: logger().warn( "GetSystemFirmwareTable function doesn't seem to exist")
def results_output_table():
    """Render benchmark results as an ASCII grid.

    Prints a machine-info header, then one row per entry of the
    module-level ``benchmark_results`` sequence of
    ``(name, encodes, decodes)`` mappings (library -> ops score), with
    "encode"/"decode" sub-rows and one column per JSON library.  The best
    (highest) score in each sub-row is highlighted in green via ANSI codes.
    """
    LIBRARIES = ("hyperjson", "ujson", "yajl", "simplejson", "json", "orjson")
    # ANSI escape sequences: reset and green (used to mark the best score).
    ENDC = '\033[0m'
    GREEN = '\033[92m'

    # Header block identifying the test machine.
    uname_system, _, uname_release, uname_version, _, uname_processor = platform.uname(
    )
    print()
    print("~~~~~~~~~~~~~")
    print("Test machine:")
    print("~~~~~~~~~~~~~")
    print()
    print(uname_system, uname_release, uname_processor, uname_version)
    print()

    # First column sized to the longest benchmark name; each library column
    # is at least 10 characters wide.
    column_widths = [max(len(r[0]) for r in benchmark_results)]
    for library in LIBRARIES:
        column_widths.append(max(10, len(library)))

    # `line` is the horizontal rule reused between every row.
    line = "+{}+".format("+".join("-" * (width + 2)
                                  for width in column_widths))
    columns = [" " * (width + 2) for width in column_widths]
    for i, library in enumerate(LIBRARIES):
        columns[i + 1] = (" " + library).ljust(column_widths[i + 1] + 2)
    print(line)
    print("|{}|".format("|".join(columns)))
    print(line.replace("-", "="))  # double rule closes the header row

    for name, encodes, decodes in benchmark_results:
        # Benchmark-name row; library cells stay blank.
        columns = [" " * (width + 2) for width in column_widths]
        columns[0] = (" " + name).ljust(column_widths[0] + 2)
        print("|{}|".format("|".join(columns)))
        print(line)

        # "encode" sub-row: right-justified scores, best one in green.
        # NOTE(review): the max() below indexes every library even though
        # the loop guards `library in encodes` -- a missing library would
        # raise KeyError here; confirm encodes always covers LIBRARIES.
        columns = [None] * len(column_widths)
        columns[0] = " encode".ljust(column_widths[0] + 2)
        best = max([encodes[library] for library in LIBRARIES])
        for i, library in enumerate(LIBRARIES):
            if library in encodes:
                if encodes[library] == best:
                    s = GREEN
                else:
                    s = ''
                columns[i + 1] = s + "{:.2f} ".format(
                    encodes[library]).rjust(column_widths[i + 1] + 2) + ENDC
            else:
                columns[i + 1] = " " * (column_widths[i + 1] + 2)
        print("|{}|".format("|".join(columns)))
        print(line)

        # "decode" sub-row, emitted only when decode results exist.
        if decodes:
            columns = [None] * len(column_widths)
            columns[0] = " decode".ljust(column_widths[0] + 2)
            best = max([decodes[library] for library in LIBRARIES])
            for i, library in enumerate(LIBRARIES):
                if library in decodes:
                    if decodes[library] == best:
                        s = GREEN
                    else:
                        s = ''
                    columns[i + 1] = s + "{:.2f} ".format(
                        decodes[library]).rjust(column_widths[i + 1] + 2) + ENDC
                else:
                    columns[i + 1] = " " * (column_widths[i + 1] + 2)
            print("|{}|".format("|".join(columns)))
            print(line)
def __init__(self, session, api_id, api_hash, *, connection=ConnectionTcpFull, use_ipv6=False, proxy=None, timeout=timedelta(seconds=10), request_retries=5, connection_retries=5, auto_reconnect=True, sequential_updates=True, flood_sleep_threshold=60, device_model=None, system_version=None, app_version=None, lang_code='en', system_lang_code='en'): if not api_id or not api_hash: raise ValueError( "Your API ID or Hash cannot be empty or None. " "Refer to telethon.rtfd.io for more information.") self._use_ipv6 = use_ipv6 # Determine what session object we have if isinstance(session, str) or session is None: session = SQLiteSession(session) elif not isinstance(session, Session): raise TypeError( 'The given session must be a str or a Session instance.' ) # ':' in session.server_address is True if it's an IPv6 address if (not session.server_address or (':' in session.server_address) != use_ipv6): session.set_dc( DEFAULT_DC_ID, DEFAULT_IPV6_IP if self._use_ipv6 else DEFAULT_IPV4_IP, DEFAULT_PORT ) self.flood_sleep_threshold = flood_sleep_threshold self.session = session self.api_id = int(api_id) self.api_hash = api_hash self._request_retries = request_retries or sys.maxsize self._connection_retries = connection_retries or sys.maxsize self._auto_reconnect = auto_reconnect if isinstance(connection, type): connection = connection( proxy=proxy, timeout=timeout) # Used on connection. Capture the variables in a lambda since # exporting clients need to create this InvokeWithLayerRequest. 
system = platform.uname() self._init_with = lambda x: functions.InvokeWithLayerRequest( LAYER, functions.InitConnectionRequest( api_id=self.api_id, device_model=device_model or system.system or 'Unknown', system_version=system_version or system.release or '1.0', app_version=app_version or self.__version__, lang_code=lang_code, system_lang_code=system_lang_code, lang_pack='', # "langPacks are for official apps only" query=x ) ) state = MTProtoState(self.session.auth_key) self._connection = connection self._sender = MTProtoSender( state, connection, retries=self._connection_retries, auto_reconnect=self._auto_reconnect, update_callback=self._handle_update, auth_key_callback=self._auth_key_callback, auto_reconnect_callback=self._handle_auto_reconnect ) # Remember flood-waited requests to avoid making them again self._flood_waited_requests = {} # Cache ``{dc_id: (n, MTProtoSender)}`` for all borrowed senders, # being ``n`` the amount of borrows a given sender has; once ``n`` # reaches ``0`` it should be disconnected and removed. self._borrowed_senders = {} self._borrow_sender_lock = threading.Lock() # Save whether the user is authorized here (a.k.a. logged in) self._authorized = None # None = We don't know yet # Default PingRequest delay self._last_ping = datetime.now() self._ping_delay = timedelta(minutes=1) self._updates_handle = None self._last_request = time.time() self._channel_pts = {} if sequential_updates: self._updates_queue = queue.Queue() self._dispatching_updates_queue = threading.Event() else: self._updates_queue = None self._dispatching_updates_queue = None # Start with invalid state (-1) so we can have somewhere to store # the state, but also be able to determine if we are authorized. 
self._state = types.updates.State(-1, 0, datetime.now(), 0, -1) # Some further state for subclasses self._event_builders = [] self._events_pending_resolve = [] self._event_resolve_lock = threading.Lock() # Keep track of how many event builders there are for # each type {type: count}. If there's at least one then # the event will be built, and the same event be reused. self._event_builders_count = collections.defaultdict(int) # Default parse mode self._parse_mode = markdown # Some fields to easy signing in. Let {phone: hash} be # a dictionary because the user may change their mind. self._phone_code_hash = {} self._phone = None self._tos = None # Sometimes we need to know who we are, cache the self peer self._self_input_peer = None
# -*- coding: utf-8 -*-
"""Print each platform.* introspection value on its own labelled line."""
import platform

# Table of (label, accessor) pairs; each accessor is a zero-argument
# platform function whose result is printed next to its label.
_PROBES = (
    ('uname', platform.uname),
    ('system', platform.system),
    ('node', platform.node),
    ('release', platform.release),
    ('version', platform.version),
    ('machine', platform.machine),
    ('processor', platform.processor),
)

for _label, _probe in _PROBES:
    print('{0}: {1}'.format(_label, _probe()))
def os_version():
    """Return the operating system's release string (e.g. '5.15.0')."""
    info = platform.uname()
    return info.release
# Locale-support test scaffolding (CPython stdlib test style).
from _locale import (setlocale, LC_ALL, LC_CTYPE, LC_NUMERIC, localeconv,
                     Error)
try:
    from _locale import (RADIXCHAR, THOUSEP, nl_langinfo)
except ImportError:
    # Some platforms build _locale without nl_langinfo support; tests that
    # need it check for None.
    nl_langinfo = None

import codecs
import locale
import sys
import unittest
from platform import uname

from test.support import run_unittest

# Locale support is known broken on very old Mac OS X (< 10.4, i.e. Darwin
# kernel < 8.0.0); skip the whole module there.
if uname().system == "Darwin":
    maj, min, mic = [int(part) for part in uname().release.split(".")]
    if (maj, min, mic) < (8, 0, 0):
        raise unittest.SkipTest("locale support broken for OS X < 10.4")

# Locale names exercised by the tests; mostly bare language_REGION codes
# plus a few explicit-encoding variants at the end.
candidate_locales = [
    'es_UY', 'fr_FR', 'fi_FI', 'es_CO', 'pt_PT', 'it_IT', 'et_EE', 'es_PY',
    'no_NO', 'nl_NL', 'lv_LV', 'el_GR', 'be_BY', 'fr_BE', 'ro_RO', 'ru_UA',
    'ru_RU', 'es_VE', 'ca_ES', 'se_NO', 'es_EC', 'id_ID', 'ka_GE', 'es_CL',
    'wa_BE', 'hu_HU', 'lt_LT', 'sl_SI', 'hr_HR', 'es_AR', 'es_ES', 'oc_FR',
    'gl_ES', 'bg_BG', 'is_IS', 'mk_MK', 'de_AT', 'pt_BR', 'da_DK', 'nn_NO',
    'cs_CZ', 'de_LU', 'es_BO', 'sq_AL', 'sk_SK', 'fr_CH', 'de_DE', 'sr_YU',
    'br_FR', 'nl_BE', 'sv_FI', 'pl_PL', 'fr_CA', 'fo_FO', 'bs_BA', 'fr_LU',
    'kl_GL', 'fa_IR', 'de_BE', 'sv_SE', 'it_CH', 'uk_UA', 'eu_ES', 'vi_VN',
    'af_ZA', 'nb_NO', 'en_DK', 'tg_TJ', 'en_US', 'es_ES.ISO8859-1',
    'fr_FR.ISO8859-15', 'ru_RU.KOI8-R', 'ko_KR.eucKR'
]
def is_wsl() -> bool:
    """Return True when running under Windows Subsystem for Linux.

    WSL is identified as a Linux system whose kernel release string
    mentions 'microsoft' (case-insensitive).
    """
    info = platform.uname()
    if info.system != "Linux":
        return False
    return "microsoft" in info.release.lower()
def start_reload(zope_layer_dotted_name, reload_paths=('src',),
                 preload_layer_dotted_name='plone.app.testing.PLONE_FIXTURE'):
    # Boot a reloadable Zope 2 test server: preload the base layer, fork a
    # child per reload cycle (ForkLoop) and watch reload_paths for source
    # changes (Watcher).  Python 2 code -- print statements throughout.
    # NOTE(review): Zope2Server, ForkLoop, Watcher, WAIT/READY/ERROR and the
    # LISTENER_*/ZSERVER_HOST constants are defined elsewhere in this module.
    print WAIT("Starting Zope 2 server")

    zsl = Zope2Server()
    zsl.start_zope_server(preload_layer_dotted_name)

    forkloop = ForkLoop()
    Watcher(reload_paths, forkloop).start()
    forkloop.start()

    # The parent only reaches this branch once the fork loop was told to
    # exit: stop the server and bail out.
    if forkloop.exit:
        print WAIT("Stopping Zope 2 server")
        zsl.stop_zope_server()
        print READY("Zope 2 server stopped")
        return

    # XXX: For unknown reason call to socket.gethostbyaddr may cause malloc
    # errors on OSX in forked child when called from medusa http_server, but
    # proper sleep seem to fix it:
    import time
    import socket
    import platform
    # Membership test over the uname() tuple: true when any field (here the
    # system name) equals 'Darwin', i.e. we are on macOS.
    if 'Darwin' in platform.uname():
        gethostbyaddr = socket.gethostbyaddr
        socket.gethostbyaddr = lambda x: time.sleep(0.5) or (ZSERVER_HOST,)

    # Setting smaller asyncore poll timeout will speed up restart a bit
    import plone.testing.z2
    plone.testing.z2.ZServer.timeout = 0.5

    zsl.amend_zope_server(zope_layer_dotted_name)

    # Restore the patched resolver once the server has been amended.
    if 'Darwin' in platform.uname():
        socket.gethostbyaddr = gethostbyaddr

    print READY("Zope 2 server started")

    # Control channel: an XML-RPC listener that lets the driving process
    # trigger ZODB setup/teardown between test runs.
    try:
        listener = SimpleXMLRPCServer((LISTENER_HOST, LISTENER_PORT),
                                      logRequests=False)
    except socket.error as e:
        # Port already taken (or similar): report, clean up and give up.
        print ERROR(str(e))
        print WAIT("Pruning Zope 2 server")
        zsl.prune_zope_server()
        return

    listener.timeout = 0.5
    listener.allow_none = True
    listener.register_function(zsl.zodb_setup, 'zodb_setup')
    listener.register_function(zsl.zodb_teardown, 'zodb_teardown')

    # Serve control requests until the fork loop exits; always prune the
    # server on the way out.
    try:
        while not forkloop.exit:
            listener.handle_request()
    except select.error:  # Interrupted system call
        pass
    finally:
        print WAIT("Pruning Zope 2 server")
        zsl.prune_zope_server()
async def amireallyalivereset(ureset):
    """Handle the .resetalive command: restore the .alive display name.

    Resets the module-level DEFAULTUSER to the configured ALIVE_NAME, or
    to this machine's hostname when ALIVE_NAME is unset, then confirms by
    editing the triggering message.
    """
    global DEFAULTUSER
    if ALIVE_NAME:
        DEFAULTUSER = str(ALIVE_NAME)
    else:
        DEFAULTUSER = uname().node
    await ureset.edit("`" "Successfully reset user for alive!" "`")
def is_macos():
    """Return True when the OS reports itself as Darwin (macOS)."""
    system_name = platform.uname()[0]
    return system_name == "Darwin"
def os_name():
    """Return the OS name reported by the kernel (e.g. 'Linux', 'Windows')."""
    info = platform.uname()
    return info.system
'flake8-bugbear~=19.8.0', 'mypy==0.770', 'coverage~=4.5.2', 'requests-xml~=0.2.3', 'lxml', ] + DOCS_DEPS, 'docs': DOCS_DEPS, } EXT_CFLAGS = ['-O2'] EXT_LDFLAGS = [] ROOT_PATH = pathlib.Path(__file__).parent.resolve() if platform.uname().system != 'Windows': EXT_CFLAGS.extend([ '-std=c99', '-fsigned-char', '-Wall', '-Wsign-compare', '-Wconversion' ]) def _compile_parsers(build_lib, inplace=False): import parsing import edb.edgeql.parser.grammar.single as edgeql_spec import edb.edgeql.parser.grammar.block as edgeql_spec2 import edb.edgeql.parser.grammar.sdldocument as schema_spec for spec in (edgeql_spec, edgeql_spec2, schema_spec): spec_path = pathlib.Path(spec.__file__).parent subpath = pathlib.Path(str(spec_path)[len(str(ROOT_PATH)) + 1:])
#!/usr/bin/python import platform print platform.uname()