def validate_driver_yuv444lossless():
    """Check whether the NVidia kernel driver can be used with NVENC YUV444/lossless.

    Returns False when YUV444P / lossless should be disabled (unsupported driver,
    not force-enabled), True otherwise.
    Raises Exception when the driver version is blacklisted for NVENC.
    """
    #this should log the kernel module version
    v = get_nvidia_module_version()
    if not v:
        log.warn("Warning: unknown NVidia driver version")
        bl = None
    else:
        bl = is_blacklisted()
    if bl is True:
        raise Exception("NVidia driver version %s is blacklisted, it does not work with NVENC" % pver(v))
    elif bl is None:
        #unknown driver status: warn once at warn level, then at debug level:
        global _version_warning
        if _version_warning:
            l = log
        else:
            l = log.warn
            _version_warning = True
        if v:
            l("Warning: NVidia driver version %s is unsupported with NVENC", pver(v))
            l(" recommended driver versions: up to 350 only")
        #BUGFIX: the condition was inverted - the messages below describe the
        #disable path, and XPRA_NVENC_YUV444P=1 is the documented force-enable,
        #so we must disable only when the env var is NOT set:
        if not envbool("XPRA_NVENC_YUV444P", False):
            l(" disabling YUV444P and lossless mode")
            l(" use XPRA_NVENC_YUV444P=1 to force enable it")
            return False
    return True
def validate_driver_yuv444lossless():
    """Validate the NVidia kernel driver for NVENC YUV444/lossless support.

    Returns False when YUV444P / lossless mode should not be used,
    True otherwise; raises Exception for blacklisted driver versions.
    """
    #this should log the kernel module version
    version = get_nvidia_module_version()
    if version:
        blacklisted = is_blacklisted()
    else:
        log.warn("Warning: unknown NVidia driver version")
        blacklisted = None
    if blacklisted is True:
        raise Exception("NVidia driver version %s is blacklisted, it does not work with NVENC" % pver(version))
    if blacklisted is None:
        #untested driver: log loudly only the first time
        global _version_warning
        emit = log if _version_warning else log.warn
        _version_warning = True
        if version:
            emit("Warning: NVidia driver version %s is untested with NVENC", pver(version))
            emit(" (this encoder has been tested with versions up to %s.x only)", MAX_TESTED)
        if not envbool("XPRA_NVENC_YUV444P", True):
            emit(" enabling YUV444P and lossless mode")
            emit(" use XPRA_NVENC_YUV444P=0 to force disable")
            return False
    return True
def validate_driver_yuv444lossless():
    """Check the NVidia driver against the NVENC blacklist and decide whether
    YUV444P / lossless mode may be used.

    Returns True when usable, False otherwise; raises Exception for
    blacklisted driver versions.
    """
    #this should log the kernel module version
    v = get_nvidia_module_version()
    if not v:
        log.warn("Warning: unknown NVidia driver version")
    bl = is_blacklisted() if v else None
    if bl is True:
        raise Exception("NVidia driver version %s is blacklisted, it does not work with NVENC" % pver(v))
    if bl is not None:
        #known-good driver:
        return True
    #unknown / untested driver: warn the first time only
    global _version_warning
    logfn = log if _version_warning else log.warn
    _version_warning = True
    if v:
        logfn("Warning: NVidia driver version %s is untested with NVENC", pver(v))
        logfn(" (this encoder has been tested with versions up to %s.x only)", MAX_TESTED)
    if not envbool("XPRA_NVENC_YUV444P", True):
        logfn(" enabling YUV444P and lossless mode")
        logfn(" use XPRA_NVENC_YUV444P=0 to force disable")
        return False
    return True
def identify_nvidia_module_version():
    """Identify the NVidia kernel driver version.

    Returns a list of ints (e.g. [352, 21]), [] when the version cannot be
    parsed, or None on unsupported platforms.
    """
    #BUGFIX: bind v upfront so a failed NVAPI call below cannot leave it
    #undefined (which used to raise NameError in the parse loop, and then a
    #second NameError while formatting the warning message):
    v = ()
    if os.name != "posix":
        if not sys.platform.startswith("win"):
            log.warn("Warning: unable to identify the NVidia driver version on this platform")
            return None
        #try the nvapi call:
        try:
            from xpra.codecs.nvapi_version import get_driver_version    #@UnresolvedImport
            v = get_driver_version()
            log("NVAPI get_driver_version()=%s", v)
        except Exception as e:
            log.warn("failed to get the driver version through NVAPI:")
            log.warn(" %s", e)
    else:
        v = get_nvml_driver_version() or get_proc_driver_version()
    #only keep numeric values:
    numver = []
    try:
        for x in v:
            try:
                numver.append(int(x))
            except ValueError:
                #a non-numeric first component means the version is unusable:
                if len(numver) == 0:
                    raise
        if numver:
            log.info("NVidia driver version %s", pver(numver))
            return numver
    except Exception as e:
        log.warn("failed to parse Nvidia driver version '%s': %s", v, e)
    return []
def main():
    """Command-line entry point: run the OpenGL support check and print the
    results; returns the number of errors found."""
    from xpra.platform import init, clean
    from xpra.util import pver
    try:
        init("OpenGL-Check")
        verbose = ("-v" in sys.argv) or ("--verbose" in sys.argv)
        if verbose:
            log.enable_debug()
        #replace ImportError with a log message:
        global gl_check_error
        problems = []
        def record_error(msg):
            log.error("ERROR: %s", msg)
            problems.append(msg)
        gl_check_error = record_error
        props = check_support(0, True, verbose)
        log.info("")
        if problems:
            log.info("OpenGL errors:")
            for problem in problems:
                log.info(" %s", problem)
        log.info("")
        log.info("OpenGL properties:")
        for key in sorted(props.keys()):
            #skip not human readable:
            if key in ("extensions", "glconfig"):
                continue
            log.info("* %s : %s", str(key).ljust(24), pver(props[key]))
        return len(problems)
    finally:
        clean()
def main():
    """Command-line entry point: initialize the platform GUI, run the OpenGL
    support check and print the results; returns the number of errors."""
    from xpra.platform import init, clean
    from xpra.platform.gui import init as gui_init
    from xpra.util import pver
    try:
        init("OpenGL-Check")
        gui_init()
        verbose = ("-v" in sys.argv) or ("--verbose" in sys.argv)
        if verbose:
            log.enable_debug()
        #replace ImportError with a log message:
        global gl_check_error
        failures = []
        def collect_error(msg):
            log.error("ERROR: %s", msg)
            failures.append(msg)
        gl_check_error = collect_error
        props = check_support(0, True, verbose)
        log.info("")
        if failures:
            log.info("OpenGL errors:")
            for failure in failures:
                log.info(" %s", failure)
        log.info("")
        log.info("OpenGL properties:")
        for key in sorted(props.keys()):
            #skip not human readable:
            if key in ("extensions", "glconfig"):
                continue
            log.info("* %s : %s", str(key).ljust(24), pver(props[key]))
        return len(failures)
    finally:
        clean()
def identify_nvidia_module_version():
    """Identify the NVidia kernel driver version.

    Returns a list of ints, [] when parsing fails, or None when the platform
    is not supported.
    """
    #BUGFIX: initialize v so that the NVAPI failure path cannot leave it
    #unbound - the parse loop (and the warning that formats v) would
    #otherwise raise NameError:
    v = ()
    if os.name != "posix":
        if not sys.platform.startswith("win"):
            log.warn("Warning: unable to identify the NVidia driver version on this platform")
            return None
        #try the nvapi call:
        try:
            from xpra.codecs.nvapi_version import get_driver_version    #@UnresolvedImport
            v = get_driver_version()
            log("NVAPI get_driver_version()=%s", v)
        except Exception as e:
            log.warn("failed to get the driver version through NVAPI:")
            log.warn(" %s", e)
    else:
        v = get_nvml_driver_version() or get_proc_driver_version()
    #only keep numeric values:
    numver = []
    try:
        for x in v:
            try:
                numver.append(int(x))
            except ValueError:
                #give up only if the very first component is non-numeric:
                if len(numver) == 0:
                    raise
        if numver:
            log.info("NVidia driver version %s", pver(numver))
            return numver
    except Exception as e:
        log.warn("failed to parse Nvidia driver version '%s': %s", v, e)
    return []
def dump_dict(d):
    """Print every key/value pair of dict *d*, one per line.

    Values that cannot be pretty-printed fall back to repr(); if iterating
    the dict itself fails, the error and the raw dict are printed instead.
    """
    pk = None
    try:
        for pk, pv in d.items():
            try:
                if isinstance(pv, unicode):
                    sv = pv.encode("utf8")
                else:
                    sv = nonl(pver(pv))
            except Exception:
                #pretty-printing failed (or py3: unicode is undefined), fall back:
                sv = repr(pv)
            print(" %s : %s" % (pk.ljust(32), sv))
    except Exception as e:
        print(" error on %s: %s" % (pk, e))
        #BUGFIX: the format string was missing the %s placeholder,
        #so the dict contents were never actually printed:
        print(" raw attributes: %s" % d)
def dump_dict(d):
    """Print each key/value pair of dict *d* in aligned "key : value" form.

    Falls back to repr() for values that cannot be pretty-printed; if the
    dict itself cannot be iterated, prints the error and the raw dict.
    """
    pk = None
    try:
        for pk, pv in d.items():
            try:
                #prefer isinstance over type()== (handles subclasses):
                if isinstance(pv, unicode):
                    sv = pv.encode("utf8")
                else:
                    sv = nonl(pver(pv))
            except Exception:
                sv = repr(pv)
            print(" %s : %s" % (pk.ljust(32), sv))
    except Exception as e:
        print(" error on %s: %s" % (pk, e))
        #BUGFIX: add the missing %s placeholder so the raw dict is shown:
        print(" raw attributes: %s" % d)
def identify_nvidia_module_version():
    """Return the NVidia driver version as a tuple of ints, or () on failure."""
    raw = get_nvml_driver_version() or get_proc_driver_version()
    #only keep numeric values:
    parts = []
    try:
        for token in raw:
            try:
                parts.append(int(token))
            except ValueError:
                #a non-numeric leading component makes the version unusable:
                if not parts:
                    raise
        if parts:
            log.info("NVidia driver version %s", pver(parts))
            return tuple(parts)
    except Exception as e:
        log.warn("failed to parse Nvidia driver version '%s': %s", raw, e)
    return ()
def dump_printers(d):
    """Print every printer in dict *d* with its attributes.

    For each printer: prints its name, each property as "key : value"
    (repr() fallback for unprintable values), and any extra attributes
    returned by get_printer_attributes().
    """
    for k in sorted(d.keys()):
        v = d[k]
        print("* %s" % k)
        pk = None
        try:
            for pk, pv in v.items():
                try:
                    if type(pv) == unicode:
                        sv = pv.encode("utf8")
                    else:
                        sv = nonl(pver(pv))
                except Exception:
                    sv = repr(pv)
                print(" %s : %s" % (pk.ljust(32), sv))
        except Exception as e:
            print(" error on %s: %s" % (pk, e))
            #BUGFIX: the format string lacked the %s placeholder,
            #so the raw printer record was never printed:
            print(" raw attributes: %s" % v)
        attr = get_printer_attributes(k)
        if attr:
            print(" attributes:")
            for a in attr:
                print("  %s" % a)
def forcever(v):
    """Format version *v* with "." separators and strip any leading "v"."""
    formatted = pver(v, numsep=".", strsep=".")
    return formatted.lstrip("v")
def print_dict(d):
    """Print each entry of dict *d* as an aligned "* key : value" line."""
    for key in sorted(d):
        print("* %s : %s" % (key.ljust(32), nonl(pver(d[key]))))
def init_opengl(self, enable_opengl):
    """Probe, enable and validate OpenGL rendering support for this client.

    enable_opengl: True forces it on (overriding safety warnings),
    False disables it, None auto-detects.
    Side effects: sets self.opengl_props, self.opengl_enabled,
    self.client_supports_opengl, self.GLClientWindowClass and the
    texture/viewport limits; performs a test render on a temporary window.
    """
    opengllog("init_opengl(%s)", enable_opengl)
    #enable_opengl can be True, False or None (auto-detect)
    if enable_opengl is False:
        self.opengl_props["info"] = "disabled by configuration"
        return
    from xpra.scripts.config import OpenGL_safety_check
    from xpra.platform.gui import gl_check as platform_gl_check
    #collect safety warnings from the generic and platform-specific checks:
    warnings = []
    for check in (OpenGL_safety_check, platform_gl_check):
        opengllog("checking with %s", check)
        warning = check()
        opengllog("%s()=%s", check, warning)
        if warning:
            warnings.append(warning)
    self.opengl_props["info"] = ""
    def err(msg, e):
        #shared failure handler: disable OpenGL and record the error message
        opengllog("OpenGL initialization error", exc_info=True)
        self.GLClientWindowClass = None
        self.client_supports_opengl = False
        opengllog.warn("%s", msg)
        for x in str(e).split("\n"):
            opengllog.warn(" %s", x)
        self.opengl_props["info"] = str(e)
    if warnings:
        if enable_opengl is True:
            #user explicitly enabled it: warn but carry on
            opengllog.warn("OpenGL safety warning (enabled at your own risk):")
            for warning in warnings:
                opengllog.warn(" %s", warning)
            self.opengl_props["info"] = "forced enabled despite: %s" % (", ".join(warnings))
        else:
            #NOTE(review): the stray "warning" argument has no %s placeholder
            #in the format string - looks like a leftover; verify intended output
            opengllog.warn("OpenGL disabled:", warning)
            for warning in warnings:
                opengllog.warn(" %s", warning)
            self.opengl_props["info"] = "disabled: %s" % (", ".join(warnings))
        return
    try:
        opengllog("init_opengl: going to import xpra.client.gl")
        __import__("xpra.client.gl", {}, {}, [])
        __import__("xpra.client.gl.gtk_compat", {}, {}, [])
        gl_check = __import__("xpra.client.gl.gl_check", {}, {}, ["check_support"])
        opengllog("init_opengl: gl_check=%s", gl_check)
        self.opengl_props = gl_check.check_support(force_enable=(enable_opengl is True))
        opengllog("init_opengl: found props %s", self.opengl_props)
        #pick the gtk2 or gtk3 flavour of the GL client window:
        GTK_GL_CLIENT_WINDOW_MODULE = "xpra.client.gl.gtk%s.gl_client_window" % (2 + int(is_gtk3()))
        opengllog("init_opengl: trying to load GL client window module '%s'", GTK_GL_CLIENT_WINDOW_MODULE)
        gl_client_window = __import__(GTK_GL_CLIENT_WINDOW_MODULE, {}, {}, ["GLClientWindow"])
        self.GLClientWindowClass = gl_client_window.GLClientWindow
        self.client_supports_opengl = True
        #only enable opengl by default if force-enabled or if safe to do so:
        self.opengl_enabled = (enable_opengl is True) or self.opengl_props.get("safe", False)
        self.gl_texture_size_limit = self.opengl_props.get("texture-size-limit", 16 * 1024)
        self.gl_max_viewport_dims = self.opengl_props.get("max-viewport-dims", (self.gl_texture_size_limit, self.gl_texture_size_limit))
        #disable if the driver limits are too small to be usable:
        if min(self.gl_max_viewport_dims) < 4 * 1024:
            opengllog.warn("Warning: OpenGL is disabled:")
            opengllog.warn(" the maximum viewport size is too low: %s", self.gl_max_viewport_dims)
            self.opengl_enabled = False
        elif self.gl_texture_size_limit < 4 * 1024:
            opengllog.warn("Warning: OpenGL is disabled:")
            opengllog.warn(" the texture size limit is too low: %s", self.gl_texture_size_limit)
            self.opengl_enabled = False
        self.GLClientWindowClass.MAX_VIEWPORT_DIMS = self.gl_max_viewport_dims
        self.GLClientWindowClass.MAX_BACKING_DIMS = self.gl_texture_size_limit, self.gl_texture_size_limit
        mww, mwh = self.max_window_size
        opengllog("OpenGL: enabled=%s, texture-size-limit=%s, max-window-size=%s", self.opengl_enabled, self.gl_texture_size_limit, self.max_window_size)
        if self.opengl_enabled and self.gl_texture_size_limit < 16 * 1024 and (mww == 0 or mwh == 0 or self.gl_texture_size_limit < mww or self.gl_texture_size_limit < mwh):
            #log at warn level if the limit is low:
            #(if we're likely to hit it - if the screen is as big or bigger)
            w, h = self.get_root_size()
            l = opengllog.info
            if w * 2 <= self.gl_texture_size_limit and h * 2 <= self.gl_texture_size_limit:
                l = opengllog
            if w >= self.gl_texture_size_limit or h >= self.gl_texture_size_limit:
                l = opengllog.warn
            l("Warning: OpenGL windows will be clamped to the maximum texture size %ix%i", self.gl_texture_size_limit, self.gl_texture_size_limit)
            l(" for OpenGL %s renderer '%s'", pver(self.opengl_props.get("opengl", "")), self.opengl_props.get("renderer", "unknown"))
        if self.opengl_enabled:
            #try to render using a temporary window:
            draw_result = {}
            window = None
            try:
                w, h = 50, 50
                window = self.GLClientWindowClass(self, None, 2**32 - 1, -100, -100, w, h, w, h, typedict({}), False, typedict({}), self.border, self.max_window_size)
                window.realize()
                pixel_format = "BGRX"
                bpp = len(pixel_format)
                options = typedict({"pixel_format": pixel_format})
                stride = bpp * w
                img_data = "\0" * stride * h
                coding = "rgb32"
                #we have to suspend idle_add to make this synchronous
                #we can do this because this method must be running in the UI thread already:
                def no_idle_add(*args, **kwargs):
                    args[0](*args[1:], **kwargs)
                window._backing.idle_add = no_idle_add
                widget = window._backing._backing
                widget.realize()
                def paint_callback(success, message):
                    #record the outcome of the synchronous test paint:
                    opengllog("paint_callback(%s, %s)", success, message)
                    draw_result.update({
                        "success": success,
                        "message": message,
                    })
                opengllog("OpenGL: testing draw on %s widget %s with %s : %s", window, widget, coding, pixel_format)
                window.draw_region(0, 0, w, h, coding, img_data, stride, 1, options, [paint_callback])
            finally:
                if window:
                    window.destroy()
            if not draw_result.get("success"):
                err("OpenGL test rendering failed:", draw_result.get("message", "unknown error"))
                return
            log("OpenGL test rendering succeeded")
        driver_info = self.opengl_props.get("renderer") or self.opengl_props.get("vendor") or "unknown card"
        if self.opengl_enabled:
            opengllog.info("OpenGL enabled with %s", driver_info)
        elif self.client_supports_opengl:
            opengllog("OpenGL supported with %s, but not enabled", driver_info)
    except ImportError as e:
        err("OpenGL support is missing:", e)
    except RuntimeError as e:
        err("OpenGL support could not be enabled on this hardware:", e)
    except Exception as e:
        err("Error loading OpenGL support:", e)
def init_opengl(self, enable_opengl):
    """Probe and enable OpenGL rendering support for this client.

    enable_opengl: True forces it on (overriding safety warnings),
    False disables it, None auto-detects.
    Side effects: sets self.opengl_props, self.opengl_enabled,
    self.client_supports_opengl, self.GLClientWindowClass and the
    texture-size / viewport limits on the window class.
    """
    opengllog("init_opengl(%s)", enable_opengl)
    #enable_opengl can be True, False or None (auto-detect)
    if enable_opengl is False:
        self.opengl_props["info"] = "disabled by configuration"
        return
    from xpra.scripts.config import OpenGL_safety_check
    from xpra.platform.gui import gl_check as platform_gl_check
    #collect safety warnings from the generic and platform-specific checks:
    warnings = []
    for check in (OpenGL_safety_check, platform_gl_check):
        opengllog("checking with %s", check)
        warning = check()
        opengllog("%s()=%s", check, warning)
        if warning:
            warnings.append(warning)
    self.opengl_props["info"] = ""
    if warnings:
        if enable_opengl is True:
            opengllog.warn("OpenGL safety warning (enabled at your own risk):")
            for warning in warnings:
                opengllog.warn(" %s", warning)
            self.opengl_props["info"] = "forced enabled despite: %s" % (", ".join(warnings))
        else:
            #BUGFIX: dropped the stray "warning" argument which had no
            #matching placeholder in the format string:
            opengllog.warn("OpenGL disabled:")
            for warning in warnings:
                opengllog.warn(" %s", warning)
            self.opengl_props["info"] = "disabled: %s" % (", ".join(warnings))
        return
    try:
        opengllog("init_opengl: going to import xpra.client.gl")
        __import__("xpra.client.gl", {}, {}, [])
        __import__("xpra.client.gl.gtk_compat", {}, {}, [])
        gl_check = __import__("xpra.client.gl.gl_check", {}, {}, ["check_support"])
        opengllog("init_opengl: gl_check=%s", gl_check)
        self.opengl_props = gl_check.check_support(force_enable=(enable_opengl is True))
        opengllog("init_opengl: found props %s", self.opengl_props)
        #pick the gtk2 or gtk3 flavour of the GL client window:
        GTK_GL_CLIENT_WINDOW_MODULE = "xpra.client.gl.gtk%s.gl_client_window" % (2 + int(is_gtk3()))
        opengllog("init_opengl: trying to load GL client window module '%s'", GTK_GL_CLIENT_WINDOW_MODULE)
        gl_client_window = __import__(GTK_GL_CLIENT_WINDOW_MODULE, {}, {}, ["GLClientWindow"])
        self.GLClientWindowClass = gl_client_window.GLClientWindow
        self.client_supports_opengl = True
        #only enable opengl by default if force-enabled or if safe to do so:
        self.opengl_enabled = (enable_opengl is True) or self.opengl_props.get("safe", False)
        self.gl_texture_size_limit = self.opengl_props.get("texture-size-limit", 16 * 1024)
        self.gl_max_viewport_dims = self.opengl_props.get("max-viewport-dims", (self.gl_texture_size_limit, self.gl_texture_size_limit))
        #disable if the driver limits are too small to be usable:
        if min(self.gl_max_viewport_dims) < 4 * 1024:
            opengllog.warn("Warning: OpenGL is disabled:")
            opengllog.warn(" the maximum viewport size is too low: %s", self.gl_max_viewport_dims)
            self.opengl_enabled = False
        elif self.gl_texture_size_limit < 4 * 1024:
            opengllog.warn("Warning: OpenGL is disabled:")
            opengllog.warn(" the texture size limit is too low: %s", self.gl_texture_size_limit)
            self.opengl_enabled = False
        #BUGFIX: removed the hard-coded 8192x8192 / 4096x4096 assignments
        #which immediately clobbered the detected limits set here (making
        #the two lines above dead stores - they looked like debug leftovers):
        self.GLClientWindowClass.MAX_VIEWPORT_DIMS = self.gl_max_viewport_dims
        self.GLClientWindowClass.MAX_BACKING_DIMS = self.gl_texture_size_limit, self.gl_texture_size_limit
        mww, mwh = self.max_window_size
        opengllog("OpenGL: enabled=%s, texture-size-limit=%s, max-window-size=%s", self.opengl_enabled, self.gl_texture_size_limit, self.max_window_size)
        if self.opengl_enabled and self.gl_texture_size_limit < 16 * 1024 and (mww == 0 or mwh == 0 or self.gl_texture_size_limit < mww or self.gl_texture_size_limit < mwh):
            #log at warn level if the limit is low:
            #(if we're likely to hit it - if the screen is as big or bigger)
            w, h = self.get_root_size()
            l = opengllog.info
            if w >= self.gl_texture_size_limit or h >= self.gl_texture_size_limit:
                #CONSISTENCY FIX: use the opengl logger like every other
                #message in this method (was: log.warn):
                l = opengllog.warn
            l("Warning: OpenGL windows will be clamped to the maximum texture size %ix%i", self.gl_texture_size_limit, self.gl_texture_size_limit)
            l(" for OpenGL %s renderer '%s'", pver(self.opengl_props.get("opengl", "")), self.opengl_props.get("renderer", "unknown"))
        driver_info = self.opengl_props.get("renderer") or self.opengl_props.get("vendor") or "unknown card"
        if self.opengl_enabled:
            opengllog.info("OpenGL enabled with %s", driver_info)
        elif self.client_supports_opengl:
            opengllog("OpenGL supported with %s, but not enabled", driver_info)
    except ImportError as e:
        opengllog.warn("OpenGL support is missing:")
        opengllog.warn(" %s", e)
        self.opengl_props["info"] = str(e)
    except RuntimeError as e:
        opengllog.warn("OpenGL support could not be enabled on this hardware:")
        opengllog.warn(" %s", e)
        self.opengl_props["info"] = str(e)
    except Exception as e:
        opengllog.error("Error loading OpenGL support:")
        opengllog.error(" %s", e, exc_info=True)
        self.opengl_props["info"] = str(e)
def init_opengl(self, enable_opengl):
    """Probe and enable OpenGL rendering support for this client.

    enable_opengl: True forces it on (overriding safety warnings),
    False disables it, None auto-detects.
    Side effects: sets self.opengl_props, self.opengl_enabled,
    self.client_supports_opengl, self.GLClientWindowClass and the
    texture-size / viewport limits on the window class.
    """
    opengllog("init_opengl(%s)", enable_opengl)
    #enable_opengl can be True, False or None (auto-detect)
    if enable_opengl is False:
        self.opengl_props["info"] = "disabled by configuration"
        return
    from xpra.scripts.config import OpenGL_safety_check
    from xpra.platform.gui import gl_check as platform_gl_check
    #collect safety warnings from the generic and platform-specific checks:
    warnings = []
    for check in (OpenGL_safety_check, platform_gl_check):
        opengllog("checking with %s", check)
        warning = check()
        opengllog("%s()=%s", check, warning)
        if warning:
            warnings.append(warning)
    self.opengl_props["info"] = ""
    if warnings:
        if enable_opengl is True:
            opengllog.warn("OpenGL safety warning (enabled at your own risk):")
            for warning in warnings:
                opengllog.warn(" %s", warning)
            self.opengl_props["info"] = "forced enabled despite: %s" % (", ".join(warnings))
        else:
            #NOTE(review): "warning" is passed with no %s placeholder in the
            #format string - looks like a leftover argument; verify intent
            opengllog.warn("OpenGL disabled:", warning)
            for warning in warnings:
                opengllog.warn(" %s", warning)
            self.opengl_props["info"] = "disabled: %s" % (", ".join(warnings))
        return
    try:
        opengllog("init_opengl: going to import xpra.client.gl")
        __import__("xpra.client.gl", {}, {}, [])
        __import__("xpra.client.gl.gtk_compat", {}, {}, [])
        gl_check = __import__("xpra.client.gl.gl_check", {}, {}, ["check_support"])
        opengllog("init_opengl: gl_check=%s", gl_check)
        self.opengl_props = gl_check.check_support(force_enable=(enable_opengl is True))
        opengllog("init_opengl: found props %s", self.opengl_props)
        #pick the gtk2 or gtk3 flavour of the GL client window:
        GTK_GL_CLIENT_WINDOW_MODULE = "xpra.client.gl.gtk%s.gl_client_window" % (2 + int(is_gtk3()))
        opengllog("init_opengl: trying to load GL client window module '%s'", GTK_GL_CLIENT_WINDOW_MODULE)
        gl_client_window = __import__(GTK_GL_CLIENT_WINDOW_MODULE, {}, {}, ["GLClientWindow"])
        self.GLClientWindowClass = gl_client_window.GLClientWindow
        self.client_supports_opengl = True
        #only enable opengl by default if force-enabled or if safe to do so:
        self.opengl_enabled = (enable_opengl is True) or self.opengl_props.get("safe", False)
        self.gl_texture_size_limit = self.opengl_props.get("texture-size-limit", 16 * 1024)
        self.gl_max_viewport_dims = self.opengl_props.get("max-viewport-dims", (self.gl_texture_size_limit, self.gl_texture_size_limit))
        #disable if the driver limits are too small to be usable:
        if min(self.gl_max_viewport_dims) < 4 * 1024:
            opengllog.warn("Warning: OpenGL is disabled:")
            opengllog.warn(" the maximum viewport size is too low: %s", self.gl_max_viewport_dims)
            self.opengl_enabled = False
        elif self.gl_texture_size_limit < 4 * 1024:
            opengllog.warn("Warning: OpenGL is disabled:")
            opengllog.warn(" the texture size limit is too low: %s", self.gl_texture_size_limit)
            self.opengl_enabled = False
        self.GLClientWindowClass.MAX_VIEWPORT_DIMS = self.gl_max_viewport_dims
        self.GLClientWindowClass.MAX_BACKING_DIMS = self.gl_texture_size_limit, self.gl_texture_size_limit
        #NOTE(review): the two assignments below clobber the detected limits
        #just assigned above, making them dead stores - possibly debug
        #leftovers; confirm whether the hard-coded caps are intentional
        self.GLClientWindowClass.MAX_VIEWPORT_DIMS = 8192, 8192
        self.GLClientWindowClass.MAX_BACKING_DIMS = 4096, 4096
        mww, mwh = self.max_window_size
        opengllog("OpenGL: enabled=%s, texture-size-limit=%s, max-window-size=%s", self.opengl_enabled, self.gl_texture_size_limit, self.max_window_size)
        if self.opengl_enabled and self.gl_texture_size_limit < 16 * 1024 and (mww == 0 or mwh == 0 or self.gl_texture_size_limit < mww or self.gl_texture_size_limit < mwh):
            #log at warn level if the limit is low:
            #(if we're likely to hit it - if the screen is as big or bigger)
            w, h = self.get_root_size()
            l = opengllog.info
            if w >= self.gl_texture_size_limit or h >= self.gl_texture_size_limit:
                #NOTE(review): uses log.warn while every other message here
                #goes through opengllog - confirm which logger is intended
                l = log.warn
            l("Warning: OpenGL windows will be clamped to the maximum texture size %ix%i", self.gl_texture_size_limit, self.gl_texture_size_limit)
            l(" for OpenGL %s renderer '%s'", pver(self.opengl_props.get("opengl", "")), self.opengl_props.get("renderer", "unknown"))
        driver_info = self.opengl_props.get("renderer") or self.opengl_props.get("vendor") or "unknown card"
        if self.opengl_enabled:
            opengllog.info("OpenGL enabled with %s", driver_info)
        elif self.client_supports_opengl:
            opengllog("OpenGL supported with %s, but not enabled", driver_info)
    except ImportError as e:
        opengllog.warn("OpenGL support is missing:")
        opengllog.warn(" %s", e)
        self.opengl_props["info"] = str(e)
    except RuntimeError as e:
        opengllog.warn("OpenGL support could not be enabled on this hardware:")
        opengllog.warn(" %s", e)
        self.opengl_props["info"] = str(e)
    except Exception as e:
        opengllog.error("Error loading OpenGL support:")
        opengllog.error(" %s", e, exc_info=True)
        self.opengl_props["info"] = str(e)
def test_pver(self):
    """pver leaves strings unchanged and joins numeric tuples with dots."""
    cases = (
        ("", ""),
        ("any string", "any string"),
        ((1, 2, 3), "1.2.3"),
    )
    for value, expected in cases:
        self.assertEqual(pver(value), expected)
def print_dict(d):
    """Print each entry as "* key : value", stripping ".version" from keys."""
    for key in sorted(d):
        label = str(key).replace(".version", "").ljust(12)
        print("* %s : %s" % (label, nonl(pver(d[key]))))