def initializeGL(self): qDebug("initializeGL()") if self.logger: self.logger.initialize() self.logger.messageLogged.connect(lambda message: qDebug(self.__tr("OpenGL debug message: {0}").fomat(message.message()))) self.logger.startLogging() gl = QOpenGLContext.currentContext().versionFunctions(glVersionProfile) QOpenGLContext.currentContext().aboutToBeDestroyed.connect(self.cleanup) self.clean = False fragmentShader = None vertexShader = vertexShader2D if self.ddsFile.isCubemap: fragmentShader = fragmentShaderCube vertexShader = vertexShaderCube if QOpenGLContext.currentContext().hasExtension(b"GL_ARB_seamless_cube_map"): GL_TEXTURE_CUBE_MAP_SEAMLESS = 0x884F gl.glEnable(GL_TEXTURE_CUBE_MAP_SEAMLESS) elif self.ddsFile.glFormat.samplerType == "F": fragmentShader = fragmentShaderFloat elif self.ddsFile.glFormat.samplerType == "UI": fragmentShader = fragmentShaderUInt else: fragmentShader = fragmentShaderSInt self.program = QOpenGLShaderProgram(self) self.program.addShaderFromSourceCode(QOpenGLShader.Vertex, vertexShader) self.program.addShaderFromSourceCode(QOpenGLShader.Fragment, fragmentShader) self.program.bindAttributeLocation("position", 0) self.program.bindAttributeLocation("texCoordIn", 1) self.program.link() self.transparecyProgram = QOpenGLShaderProgram(self) self.transparecyProgram.addShaderFromSourceCode(QOpenGLShader.Vertex, transparencyVS) self.transparecyProgram.addShaderFromSourceCode(QOpenGLShader.Fragment, transparencyFS) self.transparecyProgram.bindAttributeLocation("position", 0) self.transparecyProgram.link() self.vao = QOpenGLVertexArrayObject(self) vaoBinder = QOpenGLVertexArrayObject.Binder(self.vao) self.vbo = QOpenGLBuffer(QOpenGLBuffer.VertexBuffer) self.vbo.create() self.vbo.bind() theBytes = struct.pack("%sf" % len(vertices), *vertices) self.vbo.allocate(theBytes, len(theBytes)) gl.glEnableVertexAttribArray(0) gl.glEnableVertexAttribArray(1) gl.glVertexAttribPointer(0, 4, gl.GL_FLOAT, False, 6 * 4, 0) gl.glVertexAttribPointer(1, 2, gl.GL_FLOAT, False, 6 * 4, 4 * 4) self.texture = self.ddsFile.asQOpenGLTexture(gl, QOpenGLContext.currentContext())
def paintGL(self): qDebug("paintGL()") gl = QOpenGLContext.currentContext().versionFunctions(glVersionProfile) vaoBinder = QOpenGLVertexArrayObject.Binder(self.vao) # Draw checkerboard so transparency is obvious self.transparecyProgram.bind() if self.backgroundColour and self.backgroundColour.isValid(): self.transparecyProgram.setUniformValue("backgroundColour", self.backgroundColour) gl.glDrawArrays(gl.GL_TRIANGLES, 0, 6) self.transparecyProgram.release() self.program.bind() if self.texture: self.texture.bind() gl.glEnable(gl.GL_BLEND) gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA) gl.glDrawArrays(gl.GL_TRIANGLES, 0, 6) if self.texture: self.texture.release() self.program.release()
def create(self):
    from PyQt5.QtGui import QOpenGLContext
    self._id = gl.glGenTextures(1)
    context = QOpenGLContext.currentContext()
    self._opengl_context = context
    from seamless import add_opengl_destructor
    add_opengl_destructor(context, self.destroy)
def event(self, event):
    if event.type == Event.ViewActivateEvent:
        # FIX: on Mac OS X, somehow QOpenGLContext.currentContext() can become None during View switching.
        # This can happen when you do the following steps:
        # 1. Start Cura
        # 2. Load a model
        # 3. Switch to Custom mode
        # 4. Select the model and click on the per-object tool icon
        # 5. Switch view to Layer view or X-Ray
        # 6. Cura will very likely crash
        # It seems to be a timing issue that the currentContext can somehow be empty, but I have no clue why.
        # This fix tries to reschedule the view changing event call on the Qt thread again if the current OpenGL
        # context is None.
        if Platform.isOSX():
            if QOpenGLContext.currentContext() is None:
                Logger.log("d", "current context of OpenGL is empty on Mac OS X, will try to create shaders later")
                Application.getInstance().callLater(lambda e = event: self.event(e))
                return

    if event.type == Event.ViewDeactivateEvent:
        if self._composite_pass and 'xray' in self._composite_pass.getLayerBindings():
            self.getRenderer().removeRenderPass(self._xray_pass)
            self._composite_pass.setLayerBindings(self._old_layer_bindings)
            self._composite_pass.setCompositeShader(self._old_composite_shader)
            self._xray_warning_message.hide()
def initializeGL(self):
    import seamless
    from PyQt5.QtGui import QOpenGLContext
    if self._destroyed:
        return
    try:
        old_running_qt = seamless._running_qt
        seamless._running_qt = True
        activate_opengl()
        ctx = self.context()
        assert ctx is QOpenGLContext.currentContext()
        #print("start initializeGL")
        if not self._initialized:
            add_opengl_context(ctx)
            self._initialized = True
        self.camera.width = self.width()
        self.camera.height = self.height()
        self.camera._write()
        #print("INIT")
        PINS.init.set()
        #print("end initializeGL")
    finally:
        seamless._running_qt = old_running_qt
        deactivate_opengl()
    super().initializeGL()
    seamless.run_work()
def __init__(self) -> None:
    if OpenGL.__instance is not None:
        raise RuntimeError("Try to create singleton '%s' more than once" % self.__class__.__name__)
    OpenGL.__instance = self

    super().__init__()

    profile = QOpenGLVersionProfile()
    profile.setVersion(OpenGLContext.major_version, OpenGLContext.minor_version)
    profile.setProfile(OpenGLContext.profile)

    self._gl = QOpenGLContext.currentContext().versionFunctions(profile)  # type: Any  # It's actually a protected class in PyQt that depends on the implementation of your graphics card.
    if not self._gl:
        Logger.log("e", "Startup failed due to OpenGL initialization failing")
        QMessageBox.critical(None, i18n_catalog.i18nc("@message", "Failed to Initialize OpenGL", "Could not initialize OpenGL. This program requires OpenGL 2.0 or higher. Please check your video card drivers."))
        sys.exit(1)

    # It would be nice to be able to not necessarily need OpenGL FrameBuffer Object support, but
    # due to a limitation in PyQt, currently glReadPixels or similar methods are not available.
    # This means we can only get frame buffer contents through methods that indirectly call
    # those methods, in this case primarily QOpenGLFrameBufferObject::toImage(), making us
    # hard-depend on FrameBuffer Objects.
    if not self.hasFrameBufferObjects():
        Logger.log("e", "Startup failed, OpenGL does not support Frame Buffer Objects")
        QMessageBox.critical(None, i18n_catalog.i18nc("Critical OpenGL Extensions Missing", "Critical OpenGL extensions are missing. This program requires support for Framebuffer Objects. Please check your video card drivers."))
        sys.exit(1)

    self._gl.initializeOpenGLFunctions()

    self._gpu_vendor = OpenGL.Vendor.Other  # type: int
    vendor_string = self._gl.glGetString(self._gl.GL_VENDOR)
    if vendor_string is None:
        vendor_string = "Unknown"
    vendor_string = vendor_string.lower()
    if "nvidia" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.NVidia
    elif "amd" in vendor_string or "ati" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.AMD
    elif "intel" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.Intel

    # WORKAROUND: Cura/#1117 Cura-packaging/12
    # Some Intel GPU chipsets return a string that is not decodable via PyQt5.
    # This workaround makes the code fall back to an "Unknown" renderer in these cases.
    try:
        self._gpu_type = self._gl.glGetString(self._gl.GL_RENDERER)  # type: str
    except UnicodeDecodeError:
        Logger.log("e", "DecodeError while getting GL_RENDERER via glGetString!")
        self._gpu_type = "Unknown"  # type: str

    self._opengl_version = self._gl.glGetString(self._gl.GL_VERSION)  # type: str

    if not self.hasFrameBufferObjects():
        Logger.log("w", "No frame buffer support, falling back to texture copies.")

    Logger.log("d", "Initialized OpenGL subsystems.")
    Logger.log("d", "OpenGL Version:  %s", self._opengl_version)
    Logger.log("d", "OpenGL Vendor:   %s", self._gl.glGetString(self._gl.GL_VENDOR))
    Logger.log("d", "OpenGL Renderer: %s", self._gpu_type)
def _initialize(self):
    profile = QOpenGLVersionProfile()
    profile.setVersion(2, 0)
    self._gl = QOpenGLContext.currentContext().versionFunctions(profile)
    self._gl.initializeOpenGLFunctions()

    self._default_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "default.vert"),
        Resources.getPath(Resources.Shaders, "default.frag")
    )

    self._default_material.setUniformValue("u_ambientColor", Color(0.3, 0.3, 0.3, 1.0))
    self._default_material.setUniformValue("u_diffuseColor", Color(0.5, 0.5, 0.5, 1.0))
    self._default_material.setUniformValue("u_specularColor", Color(0.7, 0.7, 0.7, 1.0))
    self._default_material.setUniformValue("u_shininess", 20.)

    self._selection_buffer = self.createFrameBuffer(128, 128)
    self._selection_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "basic.vert"),
        Resources.getPath(Resources.Shaders, "color.frag")
    )

    self._handle_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "basic.vert"),
        Resources.getPath(Resources.Shaders, "vertexcolor.frag")
    )

    self._outline_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "outline.vert"),
        Resources.getPath(Resources.Shaders, "outline.frag")
    )

    self._initialized = True
def _initialize(self):
    profile = QOpenGLVersionProfile()
    profile.setVersion(2, 0)
    self._gl = QOpenGLContext.currentContext().versionFunctions(profile)
    self._gl.initializeOpenGLFunctions()

    self._default_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "default.vert"),
        Resources.getPath(Resources.Shaders, "default.frag"))

    self._default_material.setUniformValue("u_ambientColor", Color(0.3, 0.3, 0.3, 1.0))
    self._default_material.setUniformValue("u_diffuseColor", Color(0.5, 0.5, 0.5, 1.0))
    self._default_material.setUniformValue("u_specularColor", Color(0.7, 0.7, 0.7, 1.0))
    self._default_material.setUniformValue("u_shininess", 20.)

    self._selection_buffer = self.createFrameBuffer(128, 128)
    self._selection_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "basic.vert"),
        Resources.getPath(Resources.Shaders, "color.frag"))

    self._handle_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "basic.vert"),
        Resources.getPath(Resources.Shaders, "vertexcolor.frag"))

    self._outline_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "outline.vert"),
        Resources.getPath(Resources.Shaders, "outline.frag"))

    self._initialized = True
def event(self, event):
    if event.type == Event.ViewActivateEvent:
        # FIX: on Mac OS X, somehow QOpenGLContext.currentContext() can become None during View switching.
        # This can happen when you do the following steps:
        # 1. Start Cura
        # 2. Load a model
        # 3. Switch to Custom mode
        # 4. Select the model and click on the per-object tool icon
        # 5. Switch view to Layer view or X-Ray
        # 6. Cura will very likely crash
        # It seems to be a timing issue that the currentContext can somehow be empty, but I have no clue why.
        # This fix tries to reschedule the view changing event call on the Qt thread again if the current OpenGL
        # context is None.
        if Platform.isOSX():
            if QOpenGLContext.currentContext() is None:
                Logger.log("d", "current context of OpenGL is empty on Mac OS X, will try to create shaders later")
                CuraApplication.getInstance().callLater(lambda e=event: self.event(e))
                return

        if not self._xray_pass:
            # Currently the RenderPass constructor requires a size > 0
            # This should be fixed in RenderPass's constructor.
            self._xray_pass = XRayPass.XRayPass(1, 1)

        self.getRenderer().addRenderPass(self._xray_pass)

        if not self._xray_composite_shader:
            self._xray_composite_shader = OpenGL.getInstance().createShaderProgram(
                Resources.getPath(Resources.Shaders, "xray_composite.shader"))
            theme = Application.getInstance().getTheme()
            self._xray_composite_shader.setUniformValue("u_background_color", Color(*theme.getColor("viewport_background").getRgb()))
            self._xray_composite_shader.setUniformValue("u_outline_color", Color(*theme.getColor("model_selection_outline").getRgb()))

        if not self._composite_pass:
            self._composite_pass = self.getRenderer().getRenderPass("composite")

        self._old_layer_bindings = self._composite_pass.getLayerBindings()
        self._composite_pass.setLayerBindings(["default", "selection", "xray"])
        self._old_composite_shader = self._composite_pass.getCompositeShader()
        self._composite_pass.setCompositeShader(self._xray_composite_shader)

    if event.type == Event.ViewDeactivateEvent:
        self.getRenderer().removeRenderPass(self._xray_pass)
        self._composite_pass.setLayerBindings(self._old_layer_bindings)
        self._composite_pass.setCompositeShader(self._old_composite_shader)
def opengl_info() -> Optional[OpenGLInfo]:  # pragma: no cover
    """Get the OpenGL vendor used.

    This returns a string such as 'nouveau' or
    'Intel Open Source Technology Center'; or None if the vendor
    can't be determined.
    """
    assert QApplication.instance()

    override = os.environ.get('QUTE_FAKE_OPENGL')
    if override is not None:
        log.init.debug("Using override {}".format(override))
        vendor, version = override.split(', ', maxsplit=1)
        return OpenGLInfo.parse(vendor=vendor, version=version)

    old_context = cast(Optional[QOpenGLContext], QOpenGLContext.currentContext())
    old_surface = None if old_context is None else old_context.surface()

    surface = QOffscreenSurface()
    surface.create()

    ctx = QOpenGLContext()
    ok = ctx.create()
    if not ok:
        log.init.debug("Creating context failed!")
        return None

    ok = ctx.makeCurrent(surface)
    if not ok:
        log.init.debug("Making context current failed!")
        return None

    try:
        if ctx.isOpenGLES():
            # Can't use versionFunctions there
            return OpenGLInfo(gles=True)

        vp = QOpenGLVersionProfile()
        vp.setVersion(2, 0)

        try:
            vf = ctx.versionFunctions(vp)
        except ImportError as e:
            log.init.debug("Importing version functions failed: {}".format(e))
            return None

        if vf is None:
            log.init.debug("Getting version functions failed!")
            return None

        vendor = vf.glGetString(vf.GL_VENDOR)
        version = vf.glGetString(vf.GL_VERSION)

        return OpenGLInfo.parse(vendor=vendor, version=version)
    finally:
        ctx.doneCurrent()
        if old_context and old_surface:
            old_context.makeCurrent(old_surface)
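# --- Usage sketch (not part of the original sources) ---
# A minimal, hypothetical call site for opengl_info() above. The gles/vendor/version
# attribute names are inferred from OpenGLInfo(gles=True) and
# OpenGLInfo.parse(vendor=..., version=...) in that snippet, so treat them as assumptions.
def print_opengl_info():
    info = opengl_info()
    if info is None:
        print("OpenGL info could not be determined")
    elif info.gles:
        print("Running on OpenGL ES")
    else:
        print("Vendor: {}, version: {}".format(info.vendor, info.version))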
def isOpenGLES():
    try:
        # isOpenGLES() was introduced in Qt v5.3.
        return QOpenGLContext.currentContext().isOpenGLES()
    except:
        pass
    return False
def __init__(self):
    profile = QOpenGLVersionProfile()
    profile.setVersion(OpenGLContext.major_version, OpenGLContext.minor_version)
    profile.setProfile(OpenGLContext.profile)
    self._gl = QOpenGLContext.currentContext().versionFunctions(profile)
    if not self._gl:
        Logger.log("e", "Startup failed due to OpenGL initialization failing")
        QMessageBox.critical(
            None, "Failed to Initialize OpenGL",
            "Could not initialize OpenGL. This program requires OpenGL 2.0 or higher. Please check your video card drivers."
        )
        sys.exit(1)

    # It would be nice to be able to not necessarily need OpenGL Framebuffer Object support, but
    # due to a limitation in PyQt, currently glReadPixels or similar methods are not available.
    # This means we can only get frame buffer contents through methods that indirectly call
    # those methods, in this case primarily QOpenGLFramebufferObject::toImage(), making us
    # hard-depend on Framebuffer Objects.
    if not self.hasFrameBufferObjects():
        Logger.log("e", "Startup failed, OpenGL does not support Frame Buffer Objects")
        QMessageBox.critical(
            None, "Critical OpenGL Extensions Missing",
            "Critical OpenGL extensions are missing. This program requires support for Framebuffer Objects. Please check your video card drivers."
        )
        sys.exit(1)

    self._gl.initializeOpenGLFunctions()

    self._gpu_vendor = OpenGL.Vendor.Other
    vendor_string = self._gl.glGetString(self._gl.GL_VENDOR)
    if vendor_string is None:
        vendor_string = "Unknown"
    vendor_string = vendor_string.lower()
    if "nvidia" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.NVidia
    elif "amd" in vendor_string or "ati" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.AMD
    elif "intel" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.Intel

    self._gpu_type = self._gl.glGetString(self._gl.GL_RENDERER)

    if not self.hasFrameBufferObjects():
        Logger.log("w", "No frame buffer support, falling back to texture copies.")

    Logger.log("d", "Initialized OpenGL subsystems.")
    Logger.log("d", "OpenGL Version:  %s", self._gl.glGetString(self._gl.GL_VERSION))
    Logger.log("d", "OpenGL Vendor:   %s", self._gl.glGetString(self._gl.GL_VENDOR))
    Logger.log("d", "OpenGL Renderer: %s", self._gpu_type)
def sanity_check(self):
    from PyQt5.QtGui import QOpenGLContext
    context = QOpenGLContext.currentContext()
    assert context
    assert threading.current_thread() is threading.main_thread()
    if self._opengl_context is not None:
        #assert context is self._opengl_context
        if context is not self._opengl_context:
            self.destroy()
            self.create()
def hasExtension(cls, extension_name: str, ctx=None) -> bool:
    """Check to see if the current OpenGL implementation has a certain OpenGL extension.

    :param extension_name: :type{string} The name of the extension to query for.
    :param ctx: optionally provide context object to be used, or current context will be used.
    :return: True if the extension is available, False if not.
    """
    if ctx is None:
        ctx = QOpenGLContext.currentContext()
    return ctx.hasExtension(bytearray(extension_name, "utf-8"))
def opengl_vendor():  # pragma: no cover
    """Get the OpenGL vendor used.

    This returns a string such as 'nouveau' or
    'Intel Open Source Technology Center'; or None if the vendor
    can't be determined.
    """
    assert QApplication.instance()

    override = os.environ.get('QUTE_FAKE_OPENGL_VENDOR')
    if override is not None:
        log.init.debug("Using override {}".format(override))
        return override

    old_context = QOpenGLContext.currentContext()
    old_surface = None if old_context is None else old_context.surface()

    surface = QOffscreenSurface()
    surface.create()

    ctx = QOpenGLContext()
    ok = ctx.create()
    if not ok:
        log.init.debug("Creating context failed!")
        return None

    ok = ctx.makeCurrent(surface)
    if not ok:
        log.init.debug("Making context current failed!")
        return None

    try:
        if ctx.isOpenGLES():
            # Can't use versionFunctions there
            return None

        vp = QOpenGLVersionProfile()
        vp.setVersion(2, 0)

        try:
            vf = ctx.versionFunctions(vp)
        except ImportError as e:
            log.init.debug("Importing version functions failed: {}".format(e))
            return None

        if vf is None:
            log.init.debug("Getting version functions failed!")
            return None

        return vf.glGetString(vf.GL_VENDOR)
    finally:
        ctx.doneCurrent()
        if old_context and old_surface:
            old_context.makeCurrent(old_surface)
def supportsVertexArrayObjects(cls, ctx = None):
    if ctx is None:
        ctx = QOpenGLContext.currentContext()
    result = False
    if cls.major_version == 4 and cls.minor_version >= 1:
        result = True
    if not result and cls.major_version > 4:
        result = True
    if not result and cls.hasExtension("GL_ARB_vertex_array_object", ctx = ctx):
        result = True
    cls.properties["supportsVertexArrayObjects"] = result
    return result
def opengl_vendor():  # pragma: no cover
    """Get the OpenGL vendor used.

    This returns a string such as 'nouveau' or
    'Intel Open Source Technology Center'; or None if the vendor
    can't be determined.
    """
    assert QApplication.instance()

    old_context = QOpenGLContext.currentContext()
    old_surface = None if old_context is None else old_context.surface()

    surface = QOffscreenSurface()
    surface.create()

    ctx = QOpenGLContext()
    ok = ctx.create()
    if not ok:
        log.init.debug("opengl_vendor: Creating context failed!")
        return None

    ok = ctx.makeCurrent(surface)
    if not ok:
        log.init.debug("opengl_vendor: Making context current failed!")
        return None

    try:
        if ctx.isOpenGLES():
            # Can't use versionFunctions there
            return None

        vp = QOpenGLVersionProfile()
        vp.setVersion(2, 0)

        try:
            vf = ctx.versionFunctions(vp)
        except ImportError as e:
            log.init.debug("opengl_vendor: Importing version functions "
                           "failed: {}".format(e))
            return None

        if vf is None:
            log.init.debug("opengl_vendor: Getting version functions failed!")
            return None

        return vf.glGetString(vf.GL_VENDOR)
    finally:
        ctx.doneCurrent()
        if old_context and old_surface:
            old_context.makeCurrent(old_surface)
def opengl_vendor():  # pragma: no cover
    """Get the OpenGL vendor used.

    This returns a string such as 'nouveau' or
    'Intel Open Source Technology Center'; or None if the vendor
    can't be determined.
    """
    # We're doing those imports here because this is only available with Qt 5.4
    # or newer.
    from PyQt5.QtGui import (QOpenGLContext, QOpenGLVersionProfile,
                             QOffscreenSurface)
    assert QApplication.instance()

    old_context = QOpenGLContext.currentContext()
    old_surface = None if old_context is None else old_context.surface()

    surface = QOffscreenSurface()
    surface.create()

    ctx = QOpenGLContext()
    ok = ctx.create()
    if not ok:
        log.init.debug("opengl_vendor: Creating context failed!")
        return None

    ok = ctx.makeCurrent(surface)
    if not ok:
        log.init.debug("opengl_vendor: Making context current failed!")
        return None

    try:
        if ctx.isOpenGLES():
            # Can't use versionFunctions there
            return None

        vp = QOpenGLVersionProfile()
        vp.setVersion(2, 0)

        vf = ctx.versionFunctions(vp)
        if vf is None:
            log.init.debug("opengl_vendor: Getting version functions failed!")
            return None

        return vf.glGetString(vf.GL_VENDOR)
    finally:
        ctx.doneCurrent()
        if old_context and old_surface:
            old_context.makeCurrent(old_surface)
def __init__(self, args, glWidget=QOpenGLWidget, requestedGLVersion=(2, 1)):
    super(_TestApplication, self).__init__(args)
    glType = QOpenGLContext.openGLModuleType()
    format = QSurfaceFormat()
    format.setDepthBufferSize(24)
    if glType == QOpenGLContext.LibGL:
        info("OPENGL MODULE TYPE", "LibGL")
        format.setVersion(requestedGLVersion[0], requestedGLVersion[1])
        format.setProfile(QSurfaceFormat.CompatibilityProfile)
        format.setOption(QSurfaceFormat.DebugContext)
        QSurfaceFormat.setDefaultFormat(format)
    else:
        info("OPENGL MODULE TYPE",
             "Unknown or LibGLES <--- this is likely to cause problems down the line")

    self.debugMembers = False
    self.mainWin = QWidget()
    if glWidget == QOpenGLWidget:
        # Only hard code size if we're not using a canvas
        self.mainWin.resize(600, 600)
    self.mainWin.setWindowTitle('TestApplication')
    self.mainWidget = glWidget()
    self.layout = QHBoxLayout(self.mainWin)
    self.layout.addWidget(self.mainWidget)
    self.mainWin.show()

    ctx = QOpenGLContext.currentContext()
    info("GL CURRENT CONTEXT", ctx)
    format = ctx.format()
    info("EFFECTIVE GL VERSION", ctx.format().version())

    self.aboutToQuit.connect(self.applicationClosing)
def hasExtension(cls, extension_name: str, ctx=None) -> bool:
    """Check to see if the current OpenGL implementation has a certain OpenGL extension.

    :param extension_name: :type{string} The name of the extension to query for.
    :param ctx: optionally provide context object to be used, or current context will be used.
    :return: True if the extension is available, False if not.
    """
    if ctx is None:
        ctx = QOpenGLContext.currentContext()
    if ctx is None:
        # We failed to get the current context.
        # The typing claims that this doesn't happen, yet results in the field indicate
        # that it does. See sentry crash CURA-87
        return False
    return ctx.hasExtension(bytearray(extension_name, "utf-8"))
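# --- Usage sketch (not part of the original sources) ---
# A minimal, hypothetical call site for the hasExtension() classmethod above, assuming a
# Uranium-style OpenGLContext class and a "gl" version-functions object obtained via
# QOpenGLContext.currentContext().versionFunctions(...). The extension name and the 0x884F
# constant are taken from the first snippet in this section; the import path is an assumption.
from UM.View.GL.OpenGLContext import OpenGLContext  # assumed import path

def enable_seamless_cubemaps_if_available(gl) -> bool:
    """Enable seamless cube-map filtering when the driver advertises the extension."""
    if not OpenGLContext.hasExtension("GL_ARB_seamless_cube_map"):
        return False
    GL_TEXTURE_CUBE_MAP_SEAMLESS = 0x884F  # same constant as in the first snippet
    gl.glEnable(GL_TEXTURE_CUBE_MAP_SEAMLESS)
    return True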
def beginRendering(self):
    if not self._initialized:
        self._initialize()

    self._gl.glViewport(0, 0, self._viewport_width, self._viewport_height)
    self._gl.glClearColor(self._background_color.redF(), self._background_color.greenF(), self._background_color.blueF(), self._background_color.alphaF())
    self._gl.glClear(self._gl.GL_COLOR_BUFFER_BIT | self._gl.GL_DEPTH_BUFFER_BIT)

    if not QOpenGLContext.currentContext().format().renderableType() == QSurfaceFormat.OpenGLES:
        self._gl.glPointSize(2)

    self._solids_queue.clear()
    self._transparent_queue.clear()
    self._overlay_queue.clear()

    self._render_selection = True
def supportsVertexArrayObjects(cls, ctx=None) -> bool:
    """Return if the current (or provided) context supports Vertex Array Objects

    :param ctx: (optional) context.
    """
    if ctx is None:
        ctx = QOpenGLContext.currentContext()
    result = False
    if cls.major_version == 4 and cls.minor_version >= 1:
        result = True
    if not result and cls.major_version > 4:
        result = True
    if not result and cls.hasExtension("GL_ARB_vertex_array_object", ctx=ctx):
        result = True
    cls.properties["supportsVertexArrayObjects"] = result
    return result
def event(self, event):
    if event.type == Event.ViewActivateEvent:
        # FIX: on Mac OS X, somehow QOpenGLContext.currentContext() can become None during View switching.
        # This can happen when you do the following steps:
        # 1. Start Cura
        # 2. Load a model
        # 3. Switch to Custom mode
        # 4. Select the model and click on the per-object tool icon
        # 5. Switch view to Layer view or X-Ray
        # 6. Cura will very likely crash
        # It seems to be a timing issue that the currentContext can somehow be empty, but I have no clue why.
        # This fix tries to reschedule the view changing event call on the Qt thread again if the current OpenGL
        # context is None.
        if Platform.isOSX():
            if QOpenGLContext.currentContext() is None:
                Logger.log("d", "current context of OpenGL is empty on Mac OS X, will try to create shaders later")
                CuraApplication.getInstance().callLater(lambda e = event: self.event(e))
                return

        if not self._xray_pass:
            # Currently the RenderPass constructor requires a size > 0
            # This should be fixed in RenderPass's constructor.
            self._xray_pass = XRayPass.XRayPass(1, 1)

        self.getRenderer().addRenderPass(self._xray_pass)

        if not self._xray_composite_shader:
            self._xray_composite_shader = OpenGL.getInstance().createShaderProgram(os.path.join(PluginRegistry.getInstance().getPluginPath("XRayView"), "xray_composite.shader"))
            theme = Application.getInstance().getTheme()
            self._xray_composite_shader.setUniformValue("u_background_color", Color(*theme.getColor("viewport_background").getRgb()))
            self._xray_composite_shader.setUniformValue("u_error_color", Color(*theme.getColor("xray_error").getRgb()))
            self._xray_composite_shader.setUniformValue("u_outline_color", Color(*theme.getColor("model_selection_outline").getRgb()))

        if not self._composite_pass:
            self._composite_pass = self.getRenderer().getRenderPass("composite")

        self._old_layer_bindings = self._composite_pass.getLayerBindings()
        self._composite_pass.setLayerBindings(["default", "selection", "xray"])
        self._old_composite_shader = self._composite_pass.getCompositeShader()
        self._composite_pass.setCompositeShader(self._xray_composite_shader)

    if event.type == Event.ViewDeactivateEvent:
        self.getRenderer().removeRenderPass(self._xray_pass)
        self._composite_pass.setLayerBindings(self._old_layer_bindings)
        self._composite_pass.setCompositeShader(self._old_composite_shader)
def __init__(self):
    super(LogoRenderer, self).__init__()

    self.m_fAngle = None
    self.m_fScale = None
    self.vertices = []
    self.normals = []
    self.program1 = QOpenGLShaderProgram()
    self.vertexAttr1 = 0
    self.normalAttr1 = 0
    self.matrixUniform1 = 0

    ver = QOpenGLVersionProfile()
    ver.setVersion(2, 1)
    cntx = QOpenGLContext.currentContext()
    #print("QOpenGLContext:", cntx, ver)
    fmt = cntx.format()
    fmt.setVersion(2, 1)
    cntx.setFormat(fmt)
    self.gl = cntx.versionFunctions(ver)
def __init__(self, args):
    super(TestApplication, self).__init__(args)
    print("EFFECTIVE QT VERSION : " + str(QT_VERSION_STR))
    self.mainWin = QWidget()
    self.mainWin.resize(600, 600)
    self.mainWin.setWindowTitle('Minimal GL test')
    self.mainWidget = QOpenGLWidget()
    self.layout = QVBoxLayout(self.mainWin)
    self.layout.addWidget(self.mainWidget)
    self.mainWin.show()
    ctx = QOpenGLContext.currentContext()
    format = ctx.format()
    print("EFFECTIVE GL VERSION : " + str(ctx.format().version()))
def initializeGL(self):
    super().initializeGL()
    self.camera.width = self.width()
    self.camera.height = self.height()
    self.camera._write()
    activate_opengl()
    if self._destroyed:
        return
    from PyQt5.QtGui import QOpenGLContext
    #print("INIT")
    ctx = self.context()
    assert ctx is QOpenGLContext.currentContext()
    #print("start initializeGL")
    if not self._initialized:
        add_opengl_context(ctx)
        self._initialized = True
    PINS.init.set()
    #print("end initializeGL")
    deactivate_opengl()
def __init__(self):
    profile = QOpenGLVersionProfile()
    profile.setVersion(2, 0)
    self._gl = QOpenGLContext.currentContext().versionFunctions(profile)
    if not self._gl:
        Logger.log("e", "Startup failed due to OpenGL initialization failing")
        # Note: QMessageBox.critical() needs a parent widget as its first argument.
        QMessageBox.critical(None, "Failed to Initialize OpenGL", "Could not initialize OpenGL. This program requires OpenGL 2.0 or higher. Please check your video card drivers.")
        sys.exit(1)

    # It would be nice to be able to not necessarily need OpenGL Framebuffer Object support, but
    # due to a limitation in PyQt, currently glReadPixels or similar methods are not available.
    # This means we can only get frame buffer contents through methods that indirectly call
    # those methods, in this case primarily QOpenGLFramebufferObject::toImage(), making us
    # hard-depend on Framebuffer Objects.
    if not self.hasFrameBufferObjects():
        Logger.log("e", "Startup failed, OpenGL does not support Frame Buffer Objects")
        QMessageBox.critical(None, "Critical OpenGL Extensions Missing", "Critical OpenGL extensions are missing. This program requires support for Framebuffer Objects. Please check your video card drivers.")
        sys.exit(1)

    self._gl.initializeOpenGLFunctions()

    self._gpu_vendor = OpenGL.Vendor.Other
    vendor_string = self._gl.glGetString(self._gl.GL_VENDOR)
    if vendor_string is None:
        vendor_string = "Unknown"
    vendor_string = vendor_string.lower()
    if "nvidia" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.NVidia
    elif "amd" in vendor_string or "ati" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.AMD
    elif "intel" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.Intel

    self._gpu_type = self._gl.glGetString(self._gl.GL_RENDERER)

    if not self.hasFrameBufferObjects():
        Logger.log("w", "No frame buffer support, falling back to texture copies.")

    Logger.log("d", "Initialized OpenGL subsystems.")
    Logger.log("d", "OpenGL Version:  %s", self._gl.glGetString(self._gl.GL_VERSION))
    Logger.log("d", "OpenGL Vendor:   %s", self._gl.glGetString(self._gl.GL_VENDOR))
    Logger.log("d", "OpenGL Renderer: %s", self._gpu_type)
def beginRendering(self):
    if not self._initialized:
        self._initialize()

    self._gl.glViewport(0, 0, self._viewport_width, self._viewport_height)
    self._gl.glClearColor(self._background_color.redF(), self._background_color.greenF(),
                          self._background_color.blueF(), self._background_color.alphaF())
    self._gl.glClear(self._gl.GL_COLOR_BUFFER_BIT | self._gl.GL_DEPTH_BUFFER_BIT)

    if not QOpenGLContext.currentContext().format().renderableType() == QSurfaceFormat.OpenGLES:
        self._gl.glPointSize(2)

    self._solids_queue.clear()
    self._transparent_queue.clear()
    self._overlay_queue.clear()

    self._render_selection = True
def opengl_vendor():  # pragma: no cover
    """Get the OpenGL vendor used.

    This returns a string such as 'nouveau' or
    'Intel Open Source Technology Center'; or None if the vendor
    can't be determined.
    """
    # We're doing those imports here because this is only available with Qt 5.4
    # or newer.
    from PyQt5.QtGui import (QOpenGLContext, QOpenGLVersionProfile,
                             QOffscreenSurface)
    assert QApplication.instance()
    assert QOpenGLContext.currentContext() is None

    surface = QOffscreenSurface()
    surface.create()

    ctx = QOpenGLContext()
    ok = ctx.create()
    assert ok

    ok = ctx.makeCurrent(surface)
    assert ok

    if ctx.isOpenGLES():
        # Can't use versionFunctions there
        return None

    vp = QOpenGLVersionProfile()
    vp.setVersion(2, 0)

    vf = ctx.versionFunctions(vp)
    vendor = vf.glGetString(vf.GL_VENDOR)
    ctx.doneCurrent()
    return vendor
def hasExtension(self, extension):
    return QOpenGLContext.currentContext().hasExtension(extension)
def event(self, event):
    modifiers = QApplication.keyboardModifiers()
    ctrl_is_active = modifiers & Qt.ControlModifier
    shift_is_active = modifiers & Qt.ShiftModifier
    if event.type == Event.KeyPressEvent and ctrl_is_active:
        amount = 10 if shift_is_active else 1
        if event.key == KeyEvent.UpKey:
            self.setLayer(self._current_layer_num + amount)
            return True
        if event.key == KeyEvent.DownKey:
            self.setLayer(self._current_layer_num - amount)
            return True

    if event.type == Event.ViewActivateEvent:
        # FIX: on Mac OS X, somehow QOpenGLContext.currentContext() can become None during View switching.
        # This can happen when you do the following steps:
        # 1. Start Cura
        # 2. Load a model
        # 3. Switch to Custom mode
        # 4. Select the model and click on the per-object tool icon
        # 5. Switch view to Layer view or X-Ray
        # 6. Cura will very likely crash
        # It seems to be a timing issue that the currentContext can somehow be empty, but I have no clue why.
        # This fix tries to reschedule the view changing event call on the Qt thread again if the current OpenGL
        # context is None.
        if Platform.isOSX():
            if QOpenGLContext.currentContext() is None:
                Logger.log("d", "current context of OpenGL is empty on Mac OS X, will try to create shaders later")
                CuraApplication.getInstance().callLater(lambda e=event: self.event(e))
                return

        # Make sure the SimulationPass is created
        layer_pass = self.getSimulationPass()
        self.getRenderer().addRenderPass(layer_pass)

        # Make sure the NozzleNode is added to the root
        nozzle = self.getNozzleNode()
        nozzle.setParent(self.getController().getScene().getRoot())
        nozzle.setVisible(False)

        Application.getInstance().globalContainerStackChanged.connect(self._onGlobalStackChanged)
        self._onGlobalStackChanged()

        if not self._simulationview_composite_shader:
            self._simulationview_composite_shader = OpenGL.getInstance().createShaderProgram(os.path.join(PluginRegistry.getInstance().getPluginPath("SimulationView"), "simulationview_composite.shader"))
            theme = Application.getInstance().getTheme()
            self._simulationview_composite_shader.setUniformValue("u_background_color", Color(*theme.getColor("viewport_background").getRgb()))
            self._simulationview_composite_shader.setUniformValue("u_outline_color", Color(*theme.getColor("model_selection_outline").getRgb()))

        if not self._composite_pass:
            self._composite_pass = self.getRenderer().getRenderPass("composite")

        self._old_layer_bindings = self._composite_pass.getLayerBindings()[:]  # make a copy so we can restore to it later
        self._composite_pass.getLayerBindings().append("simulationview")
        self._old_composite_shader = self._composite_pass.getCompositeShader()
        self._composite_pass.setCompositeShader(self._simulationview_composite_shader)

    elif event.type == Event.ViewDeactivateEvent:
        self._wireprint_warning_message.hide()
        Application.getInstance().globalContainerStackChanged.disconnect(self._onGlobalStackChanged)
        if self._global_container_stack:
            self._global_container_stack.propertyChanged.disconnect(self._onPropertyChanged)
        self._nozzle_node.setParent(None)
        self.getRenderer().removeRenderPass(self._layer_pass)
        self._composite_pass.setLayerBindings(self._old_layer_bindings)
        self._composite_pass.setCompositeShader(self._old_composite_shader)
def hasExtension(cls, extension_name, ctx = None):
    if ctx is None:
        ctx = QOpenGLContext.currentContext()
    return ctx.hasExtension(bytearray(extension_name, "utf-8"))
def supportsVertexArrayObjects(cls, ctx = None):
    if ctx is None:
        ctx = QOpenGLContext.currentContext()
    result = cls.hasExtension("GL_ARB_vertex_array_object", ctx = ctx)
    cls.properties["supportsVertexArrayObjects"] = result
    return result
def hasExtension(cls, extension_name: str, ctx=None) -> bool:
    if ctx is None:
        ctx = QOpenGLContext.currentContext()
    return ctx.hasExtension(bytearray(extension_name, "utf-8"))
def __init__(self) -> None:
    if OpenGL.__instance is not None:
        raise RuntimeError("Try to create singleton '%s' more than once" % self.__class__.__name__)
    OpenGL.__instance = self

    super().__init__()

    profile = QOpenGLVersionProfile()
    profile.setVersion(OpenGLContext.major_version, OpenGLContext.minor_version)
    profile.setProfile(OpenGLContext.profile)

    context = QOpenGLContext.currentContext()
    if not context:
        Logger.log("e", "Startup failed due to OpenGL context creation failing")
        QMessageBox.critical(None, i18n_catalog.i18nc("@message", "Failed to Initialize OpenGL", "Could not initialize an OpenGL context. This program requires OpenGL 2.0 or higher. Please check your video card drivers."))
        sys.exit(1)

    self._gl = context.versionFunctions(profile)  # type: Any  # It's actually a protected class in PyQt that depends on the implementation of your graphics card.
    if not self._gl:
        Logger.log("e", "Startup failed due to OpenGL initialization failing")
        QMessageBox.critical(None, i18n_catalog.i18nc("@message", "Failed to Initialize OpenGL", "Could not initialize OpenGL. This program requires OpenGL 2.0 or higher. Please check your video card drivers."))
        sys.exit(1)

    # It would be nice to be able to not necessarily need OpenGL FrameBuffer Object support, but
    # due to a limitation in PyQt, currently glReadPixels or similar methods are not available.
    # This means we can only get frame buffer contents through methods that indirectly call
    # those methods, in this case primarily QOpenGLFrameBufferObject::toImage(), making us
    # hard-depend on FrameBuffer Objects.
    if not self.hasFrameBufferObjects():
        Logger.log("e", "Startup failed, OpenGL does not support Frame Buffer Objects")
        QMessageBox.critical(None, i18n_catalog.i18nc("Critical OpenGL Extensions Missing", "Critical OpenGL extensions are missing. This program requires support for Framebuffer Objects. Please check your video card drivers."))
        sys.exit(1)

    self._gl.initializeOpenGLFunctions()

    self._gpu_vendor = OpenGL.Vendor.Other  # type: int
    vendor_string = self._gl.glGetString(self._gl.GL_VENDOR)
    if vendor_string is None:
        vendor_string = "Unknown"
    vendor_string = vendor_string.lower()
    if "nvidia" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.NVidia
    elif "amd" in vendor_string or "ati" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.AMD
    elif "intel" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.Intel

    self._gpu_type = "Unknown"  # type: str
    # WORKAROUND: Cura/#1117 Cura-packaging/12
    # Some Intel GPU chipsets return a string that is not decodable via PyQt5.
    # This workaround makes the code fall back to an "Unknown" renderer in these cases.
    try:
        self._gpu_type = self._gl.glGetString(self._gl.GL_RENDERER)
    except UnicodeDecodeError:
        Logger.log("e", "DecodeError while getting GL_RENDERER via glGetString!")

    self._opengl_version = self._gl.glGetString(self._gl.GL_VERSION)  # type: str

    if not self.hasFrameBufferObjects():
        Logger.log("w", "No frame buffer support, falling back to texture copies.")

    Logger.log("d", "Initialized OpenGL subsystems.")
    Logger.log("d", "OpenGL Version:  %s", self._opengl_version)
    Logger.log("d", "OpenGL Vendor:   %s", self._gl.glGetString(self._gl.GL_VENDOR))
    Logger.log("d", "OpenGL Renderer: %s", self._gpu_type)
def _initialize(self) -> None:
    profile = QOpenGLVersionProfile()
    profile.setVersion(2, 0)
    self._gl = QOpenGLContext.currentContext().versionFunctions(profile)
    self._gl.initializeOpenGLFunctions()