def initializeGL(self):
    """Set up OpenGL 2.0 functions, the scene geometry, and the textured
    shader program for this widget's context."""
    version_profile = QOpenGLVersionProfile()
    version_profile.setVersion(2, 0)
    self.gl = self.context().versionFunctions(version_profile)
    self.gl.initializeOpenGLFunctions()

    self.makeObject()

    self.gl.glEnable(self.gl.GL_DEPTH_TEST)
    self.gl.glEnable(self.gl.GL_CULL_FACE)

    # Compile vertex/fragment shaders from source strings on the instance.
    vshader = QOpenGLShader(QOpenGLShader.Vertex, self)
    vshader.compileSourceCode(self.vsrc)
    fshader = QOpenGLShader(QOpenGLShader.Fragment, self)
    fshader.compileSourceCode(self.fsrc)

    self.program = QOpenGLShaderProgram()
    self.program.addShader(vshader)
    self.program.addShader(fshader)
    # Bind attribute indices before link() so the locations are fixed.
    self.program.bindAttributeLocation("vertex", self.PROGRAM_VERTEX_ATTRIBUTE)
    self.program.bindAttributeLocation("texCoord", self.PROGRAM_TEXCOORD_ATTRIBUTE)
    self.program.link()

    self.program.bind()
    self.program.setUniformValue("texture", 0)  # sampler reads texture unit 0

    self.program.enableAttributeArray(self.PROGRAM_VERTEX_ATTRIBUTE)
    self.program.enableAttributeArray(self.PROGRAM_TEXCOORD_ATTRIBUTE)
    self.program.setAttributeArray(self.PROGRAM_VERTEX_ATTRIBUTE, self.vertices)
    self.program.setAttributeArray(self.PROGRAM_TEXCOORD_ATTRIBUTE, self.texCoords)
def initializeGL(self):
    """Resolve OpenGL 2.0 functions for this widget's context and build the
    scene object."""
    version_profile = QOpenGLVersionProfile()
    version_profile.setVersion(2, 0)
    self.gl = self.context().versionFunctions(version_profile)
    self.gl.initializeOpenGLFunctions()
    self.object = self.makeObject()
def initializeGL(self):
    """Load the OBJ model, shaders, texture, and GPU buffers; no-op when
    libGL is unavailable."""
    if not has_libGL:
        return
    # NOTE(review): this profile is constructed but never passed to
    # versionFunctions() — the raw gl* calls below don't use it; confirm
    # it isn't dead code.
    profile = QOpenGLVersionProfile()
    profile.setVersion(4, 1)
    profile.setProfile(QSurfaceFormat.CoreProfile)
    glClearColor(0.85, 0.85, 0.85, 1.0)
    self.program = self.init_shaders()
    vertices, normals, tex_coords, faces, material = read_obj(
        self.obj_path)
    self.bounding_box = get_bounding_box(vertices)
    self.model_offset = -(self.bounding_box[0] + 0.5 *
                          (self.bounding_box[1] - self.bounding_box[0])
                          )  # Offset to move the model's center into 0,0,0
    self.texture = load_texture(material)
    glEnable(GL_DEPTH_TEST)
    glEnable(GL_CULL_FACE)
    self.vertex_buf, self.index_buf = self.init_buffers(
        vertices, normals, tex_coords, faces)
    self.initialized = True
def __init__(self) -> None:
    """Singleton constructor: resolves version functions for the current
    context, verifies Framebuffer Object support (exiting on failure), and
    records GPU vendor/renderer information."""
    if OpenGL.__instance is not None:
        raise RuntimeError("Try to create singleton '%s' more than once" % self.__class__.__name__)
    OpenGL.__instance = self
    super().__init__()

    profile = QOpenGLVersionProfile()
    profile.setVersion(OpenGLContext.major_version, OpenGLContext.minor_version)
    profile.setProfile(OpenGLContext.profile)
    self._gl = QOpenGLContext.currentContext().versionFunctions(profile)  # type: Any #It's actually a protected class in PyQt that depends on the implementation of your graphics card.
    if not self._gl:
        Logger.log("e", "Startup failed due to OpenGL initialization failing")
        # NOTE(review): QMessageBox.critical expects (parent, title, text)
        # but only a parent and one i18nc() result are passed here; the
        # i18nc() call also receives three arguments — confirm intended.
        QMessageBox.critical(None, i18n_catalog.i18nc("@message", "Failed to Initialize OpenGL", "Could not initialize OpenGL. This program requires OpenGL 2.0 or higher. Please check your video card drivers."))
        sys.exit(1)

    # It would be nice to be able to not necessarily need OpenGL FrameBuffer Object support, but
    # due to a limitation in PyQt, currently glReadPixels or similar methods are not available.
    # This means we can only get frame buffer contents through methods that indirectly call
    # those methods, in this case primarily QOpenGLFrameBufferObject::toImage(), making us
    # hard-depend on FrameBuffer Objects.
    if not self.hasFrameBufferObjects():
        Logger.log("e", "Startup failed, OpenGL does not support Frame Buffer Objects")
        QMessageBox.critical(None, i18n_catalog.i18nc("Critical OpenGL Extensions Missing", "Critical OpenGL extensions are missing. This program requires support for Framebuffer Objects. Please check your video card drivers."))
        sys.exit(1)

    self._gl.initializeOpenGLFunctions()

    # Classify the GPU vendor from GL_VENDOR (may be None on some drivers).
    self._gpu_vendor = OpenGL.Vendor.Other #type: int
    vendor_string = self._gl.glGetString(self._gl.GL_VENDOR)
    if vendor_string is None:
        vendor_string = "Unknown"
    vendor_string = vendor_string.lower()
    if "nvidia" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.NVidia
    elif "amd" in vendor_string or "ati" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.AMD
    elif "intel" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.Intel

    # WORKAROUND: Cura/#1117 Cura-packaging/12
    # Some Intel GPU chipsets return a string, which is not undecodable via PyQt5.
    # This workaround makes the code fall back to a "Unknown" renderer in these cases.
    try:
        self._gpu_type = self._gl.glGetString(self._gl.GL_RENDERER) #type: str
    except UnicodeDecodeError:
        Logger.log("e", "DecodeError while getting GL_RENDERER via glGetString!")
        self._gpu_type = "Unknown" #type: str

    self._opengl_version = self._gl.glGetString(self._gl.GL_VERSION) #type: str

    if not self.hasFrameBufferObjects():
        Logger.log("w", "No frame buffer support, falling back to texture copies.")

    Logger.log("d", "Initialized OpenGL subsystems.")
    Logger.log("d", "OpenGL Version: %s", self._opengl_version)
    Logger.log("d", "OpenGL Vendor: %s", self._gl.glGetString(self._gl.GL_VENDOR))
    Logger.log("d", "OpenGL Renderer: %s", self._gpu_type)
def _initialize(self):
    """Resolve OpenGL 2.0 functions and create the default, selection,
    handle, and outline materials plus the selection framebuffer."""
    profile = QOpenGLVersionProfile()
    profile.setVersion(2, 0)
    self._gl = QOpenGLContext.currentContext().versionFunctions(profile)
    self._gl.initializeOpenGLFunctions()

    self._default_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "default.vert"),
        Resources.getPath(Resources.Shaders, "default.frag"))
    # Default lighting uniforms for the standard material.
    self._default_material.setUniformValue("u_ambientColor", Color(0.3, 0.3, 0.3, 1.0))
    self._default_material.setUniformValue("u_diffuseColor", Color(0.5, 0.5, 0.5, 1.0))
    self._default_material.setUniformValue("u_specularColor", Color(0.7, 0.7, 0.7, 1.0))
    self._default_material.setUniformValue("u_shininess", 20.)

    # 128x128 off-screen buffer; presumably used for selection picking —
    # confirm against the render code.
    self._selection_buffer = self.createFrameBuffer(128, 128)
    self._selection_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "basic.vert"),
        Resources.getPath(Resources.Shaders, "color.frag"))

    self._handle_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "basic.vert"),
        Resources.getPath(Resources.Shaders, "vertexcolor.frag"))

    self._outline_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "outline.vert"),
        Resources.getPath(Resources.Shaders, "outline.frag"))

    self._initialized = True
def __init__(self, workspace, surface: QSurface):
    """Create GL resources (VAO, VBOs, texture), compile the block shader,
    and build the projection matrix for *surface*'s aspect ratio.

    NOTE(review): the GL.gl* calls here require an OpenGL context to be
    current on this thread — confirm the caller guarantees that.
    """
    super().__init__()
    self.surf = surface
    # NOTE(review): this profile is built but never passed to
    # versionFunctions() — appears to be dead code.
    prof = QOpenGLVersionProfile()
    prof.setVersion(2, 0)
    self.vao = GL.glGenVertexArrays(1)
    self.vbo, self.vbo2 = GL.glGenBuffers(2)
    self.texture = -1  # placeholder until glGenTextures() below
    self.current_model: BlockModel = None
    self.workspace = workspace
    # Fixed: the original `self.array1, array2 = None, None` bound a
    # discarded local instead of the `self.array2` attribute.
    self.array1, self.array2 = None, None
    self.shader = QOpenGLShaderProgram()
    self.shader.addShaderFromSourceFile(QOpenGLShader.Vertex, "shader/block.vertex.glsl")
    self.shader.addShaderFromSourceFile(QOpenGLShader.Fragment, "shader/block.fragment.glsl")
    self.shader.link()
    # 1.57 rad ~ 90 degree field of view; near=0.1, far=100.
    self.proj_mat = glm.perspective(
        1.57, self.surf.size().width() / self.surf.size().height(), 0.1, 100)
    self.texture = GL.glGenTextures(1)
def initializeGL(self):
    """Initialize 4.1 core-profile GL functions, load configuration from
    settings.xml, start the sensor/motor/bot threads, and set global GL
    state."""
    version_profile = QOpenGLVersionProfile()
    version_profile.setVersion(4, 1)
    version_profile.setProfile(QSurfaceFormat.CoreProfile)
    self.gl = self.context().versionFunctions(version_profile)
    if not self.gl:
        raise RuntimeError("unable to apply OpenGL version profile")
    self.gl.initializeOpenGLFunctions()

    self.open_file('settings.xml')
    self.point_cloud_render = PC.PointCloudRender()

    # Wire sensor thread -> renderer and motor control, all from settings.xml.
    self.sensor_thread = RSXML.load('settings.xml')
    #self.sensor_thread = API.RealSenseThread(1, 'RealSenseThread')
    self.sensor_thread.connect(self.point_cloud_render)
    self.motor_control = MCXML.load('settings.xml')
    self.sensor_thread.motor_control = self.motor_control

    # The message bot is optional; link and start it only when configured.
    self.message_bot = MBXML.load('settings.xml')
    if self.message_bot is not None:
        self.message_bot.sensor = self.sensor_thread
        self.message_bot.start()
    self.sensor_thread.bot = self.message_bot
    self.sensor_thread.start()

    glClearColor(self.clear_color[0], self.clear_color[1], self.clear_color[2], 0.0)
    glEnable(GL_DEPTH_TEST)
    glDepthFunc(GL_LESS)
    glEnable(GL_CULL_FACE)
def renderNow(self):
    """Render one frame immediately, lazily creating the GL context and
    resolving OpenGL 2.0 functions on first exposure."""
    if not self.isExposed():
        return

    self.m_update_pending = False

    needsInitialize = False
    if self.m_context is None:
        self.m_context = QOpenGLContext(self)
        self.m_context.setFormat(self.requestedFormat())
        self.m_context.create()
        needsInitialize = True

    self.m_context.makeCurrent(self)

    if needsInitialize:
        version_profile = QOpenGLVersionProfile()
        version_profile.setVersion(2, 0)
        self.m_gl = self.m_context.versionFunctions(version_profile)
        self.m_gl.initializeOpenGLFunctions()
        self.initialize()

    self.render(self.m_gl)

    self.m_context.swapBuffers(self)

    if self.m_animating:
        self.renderLater()  # schedule the next frame when animating
def _initialize(self):
    """Resolve OpenGL 2.0 functions, then build the default, selection,
    handle, and outline materials and the selection framebuffer."""
    profile = QOpenGLVersionProfile()
    profile.setVersion(2, 0)
    self._gl = QOpenGLContext.currentContext().versionFunctions(profile)
    self._gl.initializeOpenGLFunctions()

    self._default_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "default.vert"),
        Resources.getPath(Resources.Shaders, "default.frag")
    )
    # Default lighting uniforms for the standard material.
    self._default_material.setUniformValue("u_ambientColor", Color(0.3, 0.3, 0.3, 1.0))
    self._default_material.setUniformValue("u_diffuseColor", Color(0.5, 0.5, 0.5, 1.0))
    self._default_material.setUniformValue("u_specularColor", Color(0.7, 0.7, 0.7, 1.0))
    self._default_material.setUniformValue("u_shininess", 20.)

    # 128x128 off-screen buffer; presumably used for selection picking —
    # confirm against the render code.
    self._selection_buffer = self.createFrameBuffer(128, 128)
    self._selection_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "basic.vert"),
        Resources.getPath(Resources.Shaders, "color.frag")
    )

    self._handle_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "basic.vert"),
        Resources.getPath(Resources.Shaders, "vertexcolor.frag")
    )

    self._outline_material = self.createMaterial(
        Resources.getPath(Resources.Shaders, "outline.vert"),
        Resources.getPath(Resources.Shaders, "outline.frag")
    )

    self._initialized = True
def opengl_info() -> Optional[OpenGLInfo]:  # pragma: no cover
    """Get the OpenGL vendor used.

    This returns a string such as 'nouveau' or
    'Intel Open Source Technology Center'; or None if the vendor can't be
    determined.
    """
    assert QApplication.instance()

    # Test override hook: "vendor, version" supplied via the environment.
    override = os.environ.get('QUTE_FAKE_OPENGL')
    if override is not None:
        log.init.debug("Using override {}".format(override))
        vendor, version = override.split(', ', maxsplit=1)
        return OpenGLInfo.parse(vendor=vendor, version=version)

    # Remember whatever context is current so it can be restored afterwards.
    old_context = cast(Optional[QOpenGLContext], QOpenGLContext.currentContext())
    old_surface = None if old_context is None else old_context.surface()

    surface = QOffscreenSurface()
    surface.create()

    ctx = QOpenGLContext()
    ok = ctx.create()
    if not ok:
        log.init.debug("Creating context failed!")
        return None

    ok = ctx.makeCurrent(surface)
    if not ok:
        log.init.debug("Making context current failed!")
        return None

    try:
        if ctx.isOpenGLES():
            # Can't use versionFunctions there
            return OpenGLInfo(gles=True)

        vp = QOpenGLVersionProfile()
        vp.setVersion(2, 0)

        # versionFunctions() can raise ImportError on some PyQt builds.
        try:
            vf = ctx.versionFunctions(vp)
        except ImportError as e:
            log.init.debug("Importing version functions failed: {}".format(e))
            return None

        if vf is None:
            log.init.debug("Getting version functions failed!")
            return None

        vendor = vf.glGetString(vf.GL_VENDOR)
        version = vf.glGetString(vf.GL_VERSION)
        return OpenGLInfo.parse(vendor=vendor, version=version)
    finally:
        # Release the probe context and restore the previous one.
        ctx.doneCurrent()
        if old_context and old_surface:
            old_context.makeCurrent(old_surface)
def init_gl(self):
    """Resolve and initialize OpenGL 2.1 functions for this widget's context."""
    global vertex_shader, fragment_shader
    profile = QOpenGLVersionProfile()
    profile.setVersion(2, 1)
    self.gl = self.context().versionFunctions(profile)
    self.gl.initializeOpenGLFunctions()
def setWin(self, win):
    """Attach *win*, grab its OpenGL context, and resolve 2.1 version functions."""
    self.win = win
    self.m_context = win.openglContext()
    profile = QOpenGLVersionProfile()
    profile.setVersion(2, 1)
    self.gl = self.m_context.versionFunctions(profile)
def setWin(self, win):
    """Remember *win* and fetch OpenGL 2.1 functions from its context."""
    self.win = win
    requested = QOpenGLVersionProfile()
    requested.setVersion(2, 1)
    ctx = self.win.openglContext()
    self.m_context = ctx
    self.gl = ctx.versionFunctions(requested)
def __init__(self):
    """Resolve OpenGL version functions for the current context, verify
    Framebuffer Object support, and record GPU vendor/renderer details.

    Exits the process (after a critical dialog) when OpenGL cannot be
    initialized or FBOs are unsupported.
    """
    profile = QOpenGLVersionProfile()
    profile.setVersion(OpenGLContext.major_version, OpenGLContext.minor_version)
    profile.setProfile(OpenGLContext.profile)
    self._gl = QOpenGLContext.currentContext().versionFunctions(profile)
    if not self._gl:
        Logger.log("e", "Startup failed due to OpenGL initialization failing")
        QMessageBox.critical(
            None, "Failed to Initialize OpenGL",
            "Could not initialize OpenGL. This program requires OpenGL 2.0 or higher. Please check your video card drivers."
        )
        sys.exit(1)

    # It would be nice to be able to not necessarily need OpenGL Framebuffer Object support, but
    # due to a limitation in PyQt, currently glReadPixels or similar methods are not available.
    # This means we can only get frame buffer contents through methods that indirectly call
    # those methods, in this case primarily QOpenGLFramebufferObject::toImage(), making us
    # hard-depend on Framebuffer Objects.
    if not self.hasFrameBufferObjects():
        # Fixed typo in log message: "Starup" -> "Startup".
        Logger.log(
            "e", "Startup failed, OpenGL does not support Frame Buffer Objects")
        QMessageBox.critical(
            None, "Critical OpenGL Extensions Missing",
            "Critical OpenGL extensions are missing. This program requires support for Framebuffer Objects. Please check your video card drivers."
        )
        sys.exit(1)

    self._gl.initializeOpenGLFunctions()

    # Classify the GPU vendor from GL_VENDOR (may be None on some drivers).
    self._gpu_vendor = OpenGL.Vendor.Other
    vendor_string = self._gl.glGetString(self._gl.GL_VENDOR)
    if vendor_string is None:
        vendor_string = "Unknown"
    vendor_string = vendor_string.lower()
    if "nvidia" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.NVidia
    elif "amd" in vendor_string or "ati" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.AMD
    elif "intel" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.Intel

    self._gpu_type = self._gl.glGetString(self._gl.GL_RENDERER)

    if not self.hasFrameBufferObjects():
        Logger.log(
            "w", "No frame buffer support, falling back to texture copies.")

    Logger.log("d", "Initialized OpenGL subsystems.")
    Logger.log("d", "OpenGL Version: %s", self._gl.glGetString(self._gl.GL_VERSION))
    Logger.log("d", "OpenGL Vendor: %s", self._gl.glGetString(self._gl.GL_VENDOR))
    Logger.log("d", "OpenGL Renderer: %s", self._gpu_type)
def initializeGL(self):
    """Resolve GL 2.0 functions, set the clear color, build the scene
    object, and enable flat shading, depth testing, and back-face culling."""
    version = QOpenGLVersionProfile()
    version.setVersion(2, 0)
    self.gl = self.context().versionFunctions(version)
    self.gl.initializeOpenGLFunctions()

    self.setClearColor(self.trolltechPurple.darker())
    self.object = self.makeObject()
    self.gl.glShadeModel(self.gl.GL_FLAT)
    self.gl.glEnable(self.gl.GL_DEPTH_TEST)
    self.gl.glEnable(self.gl.GL_CULL_FACE)
def opengl_vendor():  # pragma: no cover
    """Get the OpenGL vendor used.

    This returns a string such as 'nouveau' or
    'Intel Open Source Technology Center'; or None if the vendor can't be
    determined.
    """
    assert QApplication.instance()

    # Test override hook supplied via the environment.
    override = os.environ.get('QUTE_FAKE_OPENGL_VENDOR')
    if override is not None:
        log.init.debug("Using override {}".format(override))
        return override

    # Remember whatever context is current so it can be restored afterwards.
    old_context = QOpenGLContext.currentContext()
    old_surface = None if old_context is None else old_context.surface()

    surface = QOffscreenSurface()
    surface.create()

    ctx = QOpenGLContext()
    ok = ctx.create()
    if not ok:
        log.init.debug("Creating context failed!")
        return None

    ok = ctx.makeCurrent(surface)
    if not ok:
        log.init.debug("Making context current failed!")
        return None

    try:
        if ctx.isOpenGLES():
            # Can't use versionFunctions there
            return None

        vp = QOpenGLVersionProfile()
        vp.setVersion(2, 0)

        # versionFunctions() can raise ImportError on some PyQt builds.
        try:
            vf = ctx.versionFunctions(vp)
        except ImportError as e:
            log.init.debug("Importing version functions failed: {}".format(e))
            return None

        if vf is None:
            log.init.debug("Getting version functions failed!")
            return None

        return vf.glGetString(vf.GL_VENDOR)
    finally:
        # Release the probe context and restore the previous one.
        ctx.doneCurrent()
        if old_context and old_surface:
            old_context.makeCurrent(old_surface)
def initializeGL(self):
    """Resolve GL 2.0 functions, set the clear color, create the scene
    object, and enable flat shading, depth testing, and back-face culling."""
    version_profile = QOpenGLVersionProfile()
    version_profile.setVersion(2, 0)
    self.gl = self.context().versionFunctions(version_profile)
    self.gl.initializeOpenGLFunctions()

    self.setClearColor(self.trolltechPurple.darker())
    self.object = self.makeObject()
    self.gl.glShadeModel(self.gl.GL_FLAT)
    self.gl.glEnable(self.gl.GL_DEPTH_TEST)
    self.gl.glEnable(self.gl.GL_CULL_FACE)
def opengl_vendor():  # pragma: no cover
    """Get the OpenGL vendor used.

    This returns a string such as 'nouveau' or
    'Intel Open Source Technology Center'; or None if the vendor can't be
    determined.
    """
    assert QApplication.instance()

    # Remember whatever context is current so it can be restored afterwards.
    old_context = QOpenGLContext.currentContext()
    old_surface = None if old_context is None else old_context.surface()

    surface = QOffscreenSurface()
    surface.create()

    ctx = QOpenGLContext()
    ok = ctx.create()
    if not ok:
        log.init.debug("opengl_vendor: Creating context failed!")
        return None

    ok = ctx.makeCurrent(surface)
    if not ok:
        log.init.debug("opengl_vendor: Making context current failed!")
        return None

    try:
        if ctx.isOpenGLES():
            # Can't use versionFunctions there
            return None

        vp = QOpenGLVersionProfile()
        vp.setVersion(2, 0)

        # versionFunctions() can raise ImportError on some PyQt builds.
        try:
            vf = ctx.versionFunctions(vp)
        except ImportError as e:
            log.init.debug("opengl_vendor: Importing version functions "
                           "failed: {}".format(e))
            return None

        if vf is None:
            log.init.debug("opengl_vendor: Getting version functions failed!")
            return None

        return vf.glGetString(vf.GL_VENDOR)
    finally:
        # Release the probe context and restore the previous one.
        ctx.doneCurrent()
        if old_context and old_surface:
            old_context.makeCurrent(old_surface)
def opengl_vendor():  # pragma: no cover
    """Get the OpenGL vendor used.

    This returns a string such as 'nouveau' or
    'Intel Open Source Technology Center'; or None if the vendor can't be
    determined.
    """
    # We're doing those imports here because this is only available with Qt 5.4
    # or newer.
    from PyQt5.QtGui import (QOpenGLContext, QOpenGLVersionProfile,
                             QOffscreenSurface)
    assert QApplication.instance()

    # Remember whatever context is current so it can be restored afterwards.
    old_context = QOpenGLContext.currentContext()
    old_surface = None if old_context is None else old_context.surface()

    surface = QOffscreenSurface()
    surface.create()

    ctx = QOpenGLContext()
    ok = ctx.create()
    if not ok:
        log.init.debug("opengl_vendor: Creating context failed!")
        return None

    ok = ctx.makeCurrent(surface)
    if not ok:
        log.init.debug("opengl_vendor: Making context current failed!")
        return None

    try:
        if ctx.isOpenGLES():
            # Can't use versionFunctions there
            return None

        vp = QOpenGLVersionProfile()
        vp.setVersion(2, 0)

        vf = ctx.versionFunctions(vp)
        if vf is None:
            log.init.debug("opengl_vendor: Getting version functions failed!")
            return None

        return vf.glGetString(vf.GL_VENDOR)
    finally:
        # Release the probe context and restore the previous one.
        ctx.doneCurrent()
        if old_context and old_surface:
            old_context.makeCurrent(old_surface)
def opengl_vendor():  # pragma: no cover
    """Get the OpenGL vendor used.

    This returns a string such as 'nouveau' or
    'Intel Open Source Technology Center'; or None if the vendor can't be
    determined.
    """
    # We're doing those imports here because this is only available with Qt 5.4
    # or newer.
    from PyQt5.QtGui import (QOpenGLContext, QOpenGLVersionProfile,
                             QOffscreenSurface)
    assert QApplication.instance()

    # Save the currently-current context (if any) so it can be restored.
    old_context = QOpenGLContext.currentContext()
    old_surface = None if old_context is None else old_context.surface()

    surface = QOffscreenSurface()
    surface.create()

    ctx = QOpenGLContext()
    ok = ctx.create()
    if not ok:
        log.init.debug("opengl_vendor: Creating context failed!")
        return None

    ok = ctx.makeCurrent(surface)
    if not ok:
        log.init.debug("opengl_vendor: Making context current failed!")
        return None

    try:
        if ctx.isOpenGLES():
            # Can't use versionFunctions there
            return None

        vp = QOpenGLVersionProfile()
        vp.setVersion(2, 0)

        vf = ctx.versionFunctions(vp)
        if vf is None:
            log.init.debug("opengl_vendor: Getting version functions failed!")
            return None

        return vf.glGetString(vf.GL_VENDOR)
    finally:
        # Release the probe context and restore the previous one.
        ctx.doneCurrent()
        if old_context and old_surface:
            old_context.makeCurrent(old_surface)
def initializeGL(self):
    """Resolve GL 2.0 functions and set global render state: smooth
    shading, depth test, culling, multisampling, and alpha blending."""
    version = QOpenGLVersionProfile()
    version.setVersion(2, 0)
    self.gl = self.context().versionFunctions(version)
    self.gl.initializeOpenGLFunctions()

    self.setClearColor(GLWidget.COLOR_BACKGROUND)

    # self.gl.glPolygonMode(self.gl.GL_FRONT_AND_BACK, self.gl.GL_LINE )
    self.gl.glShadeModel(self.gl.GL_SMOOTH)
    self.gl.glEnable(self.gl.GL_DEPTH_TEST)
    self.gl.glEnable(self.gl.GL_CULL_FACE)
    # self.gl.glEnable(self.gl.GL_LIGHTING)
    # self.gl.glEnable(self.gl.GL_LIGHT0)
    self.gl.glEnable(self.gl.GL_MULTISAMPLE)
    self.gl.glEnable(self.gl.GL_BLEND)
    # Standard premultiplied-free alpha blending.
    self.gl.glBlendFunc(self.gl.GL_SRC_ALPHA, self.gl.GL_ONE_MINUS_SRC_ALPHA)
def __init__(self):
    """Prepare renderer state and resolve OpenGL 2.1 functions from the
    current context."""
    super(LogoRenderer, self).__init__()
    self.m_fAngle = None  # populated later, before rendering
    self.m_fScale = None  # populated later, before rendering
    self.vertices = []
    self.normals = []
    self.program1 = QOpenGLShaderProgram()
    self.vertexAttr1 = 0
    self.normalAttr1 = 0
    self.matrixUniform1 = 0

    ver = QOpenGLVersionProfile()
    ver.setVersion(2, 1)
    cntx = QOpenGLContext.currentContext()
    #print("QOpenGLContext:", cntx, ver)
    # NOTE(review): setFormat() on an already-created context has no effect
    # until the context is re-created — confirm this is intentional.
    fmt = cntx.format()
    fmt.setVersion(2, 1)
    cntx.setFormat(fmt)
    self.gl = cntx.versionFunctions(ver)
def __init__(self):
    """Initialize renderer state and obtain OpenGL 2.1 version functions
    from the currently-current context."""
    super(LogoRenderer, self).__init__()
    self.m_fAngle = None  # set by the caller before rendering
    self.m_fScale = None  # set by the caller before rendering
    self.vertices = []
    self.normals = []
    self.program1 = QOpenGLShaderProgram()
    self.vertexAttr1 = 0
    self.normalAttr1 = 0
    self.matrixUniform1 = 0

    ver = QOpenGLVersionProfile()
    ver.setVersion(2, 1)
    cntx = QOpenGLContext.currentContext()
    #print("QOpenGLContext:", cntx, ver)
    # NOTE(review): setFormat() on an already-created context has no effect
    # until the context is re-created — confirm this is intentional.
    fmt = cntx.format()
    fmt.setVersion(2, 1)
    cntx.setFormat(fmt)
    self.gl = cntx.versionFunctions(ver)
def initializeGL(self):
    """Load the OBJ model, shaders, texture, and GPU buffers, then enable
    depth testing and culling."""
    # NOTE(review): this profile is constructed but never passed to
    # versionFunctions() — the raw gl* calls below don't use it; confirm
    # it isn't dead code.
    profile = QOpenGLVersionProfile()
    profile.setVersion(4, 1)
    profile.setProfile(QSurfaceFormat.CoreProfile)
    glClearColor(0.85, 0.85, 0.85, 1.0)
    self.program = self.init_shaders()
    vertices, normals, tex_coords, faces, material = read_obj(self.obj_path)
    self.bounding_box = get_bounding_box(vertices)
    self.model_offset = -(self.bounding_box[0] + 0.5 * (self.bounding_box[1] - self.bounding_box[0]))  # Offset to move the model's center into 0,0,0
    self.texture = load_texture(material)
    glEnable(GL_DEPTH_TEST)
    glEnable(GL_CULL_FACE)
    self.vertex_buf, self.index_buf = self.init_buffers(vertices, normals, tex_coords, faces)
    self.initialized = True
def __init__(self):
    """Resolve OpenGL 2.0 version functions for the current context, verify
    Framebuffer Object support, and record GPU vendor/renderer details.

    Exits the process (after a critical dialog) when OpenGL cannot be
    initialized or FBOs are unsupported.
    """
    profile = QOpenGLVersionProfile()
    profile.setVersion(2, 0)
    self._gl = QOpenGLContext.currentContext().versionFunctions(profile)
    if not self._gl:
        Logger.log("e", "Startup failed due to OpenGL initialization failing")
        # Fixed: QMessageBox.critical requires a parent widget argument
        # before title/text; the old two-string call raised a TypeError.
        QMessageBox.critical(
            None, "Failed to Initialize OpenGL",
            "Could not initialize OpenGL. This program requires OpenGL 2.0 or higher. Please check your video card drivers.")
        sys.exit(1)

    # It would be nice to be able to not necessarily need OpenGL Framebuffer Object support, but
    # due to a limitation in PyQt, currently glReadPixels or similar methods are not available.
    # This means we can only get frame buffer contents through methods that indirectly call
    # those methods, in this case primarily QOpenGLFramebufferObject::toImage(), making us
    # hard-depend on Framebuffer Objects.
    if not self.hasFrameBufferObjects():
        # Fixed typo in log message: "Starup" -> "Startup".
        Logger.log("e", "Startup failed, OpenGL does not support Frame Buffer Objects")
        QMessageBox.critical(
            None, "Critical OpenGL Extensions Missing",
            "Critical OpenGL extensions are missing. This program requires support for Framebuffer Objects. Please check your video card drivers.")
        sys.exit(1)

    self._gl.initializeOpenGLFunctions()

    # Classify the GPU vendor from GL_VENDOR (may be None on some drivers).
    self._gpu_vendor = OpenGL.Vendor.Other
    vendor_string = self._gl.glGetString(self._gl.GL_VENDOR)
    if vendor_string is None:
        vendor_string = "Unknown"
    vendor_string = vendor_string.lower()
    if "nvidia" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.NVidia
    elif "amd" in vendor_string or "ati" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.AMD
    elif "intel" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.Intel

    self._gpu_type = self._gl.glGetString(self._gl.GL_RENDERER)

    if not self.hasFrameBufferObjects():
        Logger.log("w", "No frame buffer support, falling back to texture copies.")

    Logger.log("d", "Initialized OpenGL subsystems.")
    Logger.log("d", "OpenGL Version: %s", self._gl.glGetString(self._gl.GL_VERSION))
    Logger.log("d", "OpenGL Vendor: %s", self._gl.glGetString(self._gl.GL_VENDOR))
    Logger.log("d", "OpenGL Renderer: %s", self._gpu_type)
def initializeGL(self):
    """Resolve GL 2.0 functions, configure fixed-function lighting, and
    build the three gear meshes."""
    version_profile = QOpenGLVersionProfile()
    version_profile.setVersion(2, 0)
    self.gl = self.context().versionFunctions(version_profile)
    self.gl.initializeOpenGLFunctions()

    lightPos = (5.0, 5.0, 10.0, 1.0)
    # RGBA material reflectance for each gear.
    reflectance1 = (0.8, 0.1, 0.0, 1.0)
    reflectance2 = (0.0, 0.8, 0.2, 1.0)
    reflectance3 = (0.2, 0.2, 1.0, 1.0)

    self.gl.glLightfv(self.gl.GL_LIGHT0, self.gl.GL_POSITION, lightPos)
    self.gl.glEnable(self.gl.GL_LIGHTING)
    self.gl.glEnable(self.gl.GL_LIGHT0)
    self.gl.glEnable(self.gl.GL_DEPTH_TEST)

    # NOTE(review): makeGear's numeric parameters presumably follow the
    # classic "gears" demo (radii, thickness, tooth depth/count) — confirm
    # against its definition.
    self.gear1 = self.makeGear(reflectance1, 1.0, 4.0, 1.0, 0.7, 20)
    self.gear2 = self.makeGear(reflectance2, 0.5, 2.0, 2.0, 0.7, 10)
    self.gear3 = self.makeGear(reflectance3, 1.3, 2.0, 0.5, 0.7, 10)

    self.gl.glEnable(self.gl.GL_NORMALIZE)
    self.gl.glClearColor(0.0, 0.0, 0.0, 1.0)
def opengl_vendor():  # pragma: no cover
    """Get the OpenGL vendor used.

    This returns a string such as 'nouveau' or
    'Intel Open Source Technology Center'; or None if the vendor can't be
    determined.
    """
    # We're doing those imports here because this is only available with Qt 5.4
    # or newer.
    from PyQt5.QtGui import (QOpenGLContext, QOpenGLVersionProfile,
                             QOffscreenSurface)
    assert QApplication.instance()
    assert QOpenGLContext.currentContext() is None

    surface = QOffscreenSurface()
    surface.create()

    ctx = QOpenGLContext()
    ok = ctx.create()
    assert ok

    ok = ctx.makeCurrent(surface)
    assert ok

    if ctx.isOpenGLES():
        # Can't use versionFunctions there
        # NOTE(review): this early return leaves the context current
        # (doneCurrent() is skipped) — confirm that is acceptable.
        return None

    vp = QOpenGLVersionProfile()
    vp.setVersion(2, 0)
    vf = ctx.versionFunctions(vp)

    vendor = vf.glGetString(vf.GL_VENDOR)
    ctx.doneCurrent()
    return vendor
def _initialize(self) -> None:
    """Resolve and initialize OpenGL 2.0 functions from the current context."""
    requested_profile = QOpenGLVersionProfile()
    requested_profile.setVersion(2, 0)
    gl = QOpenGLContext.currentContext().versionFunctions(requested_profile)
    self._gl = gl
    self._gl.initializeOpenGLFunctions()
def renderNow(self):
    """Render one frame: on first call, create the context, resolve GL 2.0
    functions, load a missing GL entry point via ctypes, and start the GL
    debug logger; then paint through QPainter with native GL inside."""
    if not self.isExposed():
        return

    self.m_update_pending = False

    needsInitialize = False
    if self.m_context is None:
        self.m_context = QOpenGLContext(self)
        self.m_context.setFormat(self.requestedFormat())
        self.m_context.create()
        needsInitialize = True

    self.m_context.makeCurrent(self)

    if needsInitialize:
        # Sorry, no support for higher versions for now.
        profile = QOpenGLVersionProfile()
        profile.setVersion(2, 0)
        self.m_gl = self.m_context.versionFunctions(profile)
        self.m_gl.initializeOpenGLFunctions()

        #print(self.m_context.hasExtension('GL_EXT_framebuffer_object'))
        #print(self.m_context.hasExtension('GL_ARB_texture_float'))
        #print(*sorted(self.m_context.extensions()), sep='\n')

        # Small hack. Guess noone mind?
        # PyQt's 2.0 function set lacks glFramebufferTexture2D, so load it
        # straight from libGL with ctypes and register it on self.
        import ctypes
        import ctypes.util
        GL = ctypes.CDLL(ctypes.util.find_library('GL'))
        self.addGlFunctuins(GL, {
            'glFramebufferTexture2D': (ctypes.c_uint, ctypes.c_uint,
                                       ctypes.c_uint, ctypes.c_uint,
                                       ctypes.c_int)
        })

        # Route GL debug messages to handleLoggedMassage.
        self.logger = QOpenGLDebugLogger()
        self.logger.initialize()
        self.logger.loggedMessages()
        self.logger.messageLogged.connect(self.handleLoggedMassage)
        self.logger.startLogging()

        self.initialize(self.m_gl)

    if not self.m_device:
        self.m_device = QOpenGLPaintDevice()

    self.m_gl.glClear(self.m_gl.GL_COLOR_BUFFER_BIT | self.m_gl.GL_DEPTH_BUFFER_BIT);

    self.m_device.setSize(self.size())

    # Native GL rendering sandwiched inside a QPainter session.
    painter = QPainter(self.m_device)
    painter.beginNativePainting()
    self.render(self.m_gl)
    painter.endNativePainting()
    self.paint(painter)

    self.m_context.swapBuffers(self)

    if self.m_animating:
        self.renderLater()
# Minimal standalone probe: create an offscreen OpenGL context and print the
# driver's vendor/renderer/version strings.
from PyQt5.QtGui import (QOpenGLContext, QOpenGLVersionProfile,
                         QOffscreenSurface, QGuiApplication)

app = QGuiApplication([])  # a Qt application must exist before GL contexts

surface = QOffscreenSurface()
surface.create()

ctx = QOpenGLContext()
ok = ctx.create()
assert ok

ok = ctx.makeCurrent(surface)
assert ok

print(f"GLES: {ctx.isOpenGLES()}")

vp = QOpenGLVersionProfile()
vp.setVersion(2, 0)
vf = ctx.versionFunctions(vp)

print(f"Vendor: {vf.glGetString(vf.GL_VENDOR)}")
print(f"Renderer: {vf.glGetString(vf.GL_RENDERER)}")
print(f"Version: {vf.glGetString(vf.GL_VERSION)}")
print(
    f"Shading language version: {vf.glGetString(vf.GL_SHADING_LANGUAGE_VERSION)}"
)

ctx.doneCurrent()
def detectBestOpenGLVersion(
        cls, force_compatability: bool
) -> Tuple[Optional[int], Optional[int], Optional[int]]:
    """Return "best" OpenGL to use, 4.1 core or 2.0.

    result is <major_version>, <minor_version>, <profile>
    The version depends on what versions are supported in Qt (4.1 and 2.0)
    and what the GPU supports. If creating a context fails at all,
    (None, None, None) is returned

    Note that PyQt only supports 4.1, 2.1 and 2.0. Cura omits support for
    2.1, so the only returned options are 4.1 and 2.0.
    """
    cls.detect_ogl_context = None
    if not force_compatability:
        Logger.log("d", "Trying OpenGL context 4.1...")
        cls.detect_ogl_context = cls.setContext(4, 1, core=True)
        if cls.detect_ogl_context is not None:
            fmt = cls.detect_ogl_context.format()
            profile = fmt.profile()

            # First test: we hope for this
            if ((fmt.majorVersion() == 4 and fmt.minorVersion() >= 1)
                    or (fmt.majorVersion() > 4)) and profile == QSurfaceFormat.CoreProfile:
                Logger.log(
                    "d", "Yay, we got at least OpenGL 4.1 core: %s",
                    cls.versionAsText(fmt.majorVersion(), fmt.minorVersion(), profile))

                # https://riverbankcomputing.com/pipermail/pyqt/2017-January/038640.html
                # PyQt currently only implements 2.0, 2.1 and 4.1Core
                # If eg 4.5Core would be detected and used here, PyQt would not be able to handle it.
                major_version = 4
                minor_version = 1

                # CURA-6092: Check if we're not using software backed 4.1 context; A software 4.1 context
                # is much slower than a hardware backed 2.0 context
                # Check for OS, Since this only seems to happen on specific versions of Mac OSX and
                # the workaround (which involves the deletion of an OpenGL context) is a problem for some Intel drivers.
                if not Platform.isOSX():
                    return major_version, minor_version, QSurfaceFormat.CoreProfile

                # On OSX: probe the renderer string to rule out software rendering.
                gl_window = QWindow()
                gl_window.setSurfaceType(QWindow.OpenGLSurface)
                gl_window.showMinimized()

                cls.detect_ogl_context.makeCurrent(gl_window)

                gl_profile = QOpenGLVersionProfile()
                gl_profile.setVersion(major_version, minor_version)
                gl_profile.setProfile(profile)

                gl = cls.detect_ogl_context.versionFunctions(
                    gl_profile
                )  # type: Any #It's actually a protected class in PyQt that depends on the requested profile and the implementation of your graphics card.

                gpu_type = "Unknown"  # type: str

                result = None
                if gl:
                    result = gl.initializeOpenGLFunctions()

                if not result:
                    Logger.log("e", "Could not initialize OpenGL to get gpu type")
                else:
                    # WORKAROUND: Cura/#1117 Cura-packaging/12
                    # Some Intel GPU chipsets return a string, which is not undecodable via PyQt5.
                    # This workaround makes the code fall back to a "Unknown" renderer in these cases.
                    try:
                        gpu_type = gl.glGetString(gl.GL_RENDERER)
                    except UnicodeDecodeError:
                        Logger.log(
                            "e", "DecodeError while getting GL_RENDERER via glGetString!"
                        )

                    Logger.log("d", "OpenGL renderer type for this OpenGL version: %s", gpu_type)
                    if "software" in gpu_type.lower():
                        Logger.log(
                            "w", "Unfortunately OpenGL 4.1 uses software rendering")
                    else:
                        return major_version, minor_version, QSurfaceFormat.CoreProfile
        else:
            Logger.log("d", "Failed to create OpenGL context 4.1.")

    # Fallback: check min spec
    Logger.log("d", "Trying OpenGL context 2.0...")
    cls.detect_ogl_context = cls.setContext(
        2, 0, profile=QSurfaceFormat.NoProfile)

    if cls.detect_ogl_context is not None:
        fmt = cls.detect_ogl_context.format()
        profile = fmt.profile()
        if fmt.majorVersion() >= 2 and fmt.minorVersion() >= 0:
            Logger.log(
                "d", "We got at least OpenGL context 2.0: %s",
                cls.versionAsText(fmt.majorVersion(), fmt.minorVersion(), profile))
            return 2, 0, QSurfaceFormat.NoProfile
        else:
            Logger.log(
                "d", "Current OpenGL context is too low: %s" %
                cls.versionAsText(fmt.majorVersion(), fmt.minorVersion(), profile))
            return None, None, None
    else:
        Logger.log("d", "Failed to create OpenGL context 2.0.")
        return None, None, None
# Interleaved vertex data for a quad drawn as two triangles.
# Per vertex: x, y, z, w position followed by u, v texture coordinates.
vertices = [
    # vertex coordinates        texture coordinates
    -1.0, -1.0, 0.5, 1.0,       0.0, 1.0,
    -1.0,  1.0, 0.5, 1.0,       0.0, 0.0,
     1.0,  1.0, 0.5, 1.0,       1.0, 0.0,
    -1.0, -1.0, 0.5, 1.0,       0.0, 1.0,
     1.0,  1.0, 0.5, 1.0,       1.0, 0.0,
     1.0, -1.0, 0.5, 1.0,       1.0, 1.0,
]

# Shared OpenGL 2.1 version profile used by DDSWidget.
glVersionProfile = QOpenGLVersionProfile()
glVersionProfile.setVersion(2, 1)


class DDSWidget(QOpenGLWidget):
    """Widget that displays a DDS texture file with OpenGL."""

    def __init__(self, ddsFile, debugContext = False, parent = None, f = Qt.WindowFlags()):
        super(DDSWidget, self).__init__(parent, f)

        self.ddsFile = ddsFile

        self.clean = True                # no GL resources allocated yet

        self.logger = None               # optional GL debug logger

        self.program = None              # main shader program
        self.transparecyProgram = None   # [sic] transparency shader program
        self.texture = None
        self.vbo = None
def detectBestOpenGLVersion(cls):
    """Probe for the best supported OpenGL context.

    Tries a 4.1 core profile first; if that context is hardware backed it is
    used. Falls back to a 2.0 no-profile context otherwise.

    :return: a ``(major, minor, profile)`` tuple — ``(4, 1, CoreProfile)``,
        ``(2, 0, NoProfile)``, or ``(None, None, None)`` when no usable
        context could be created.
    """
    Logger.log("d", "Trying OpenGL context 4.1...")
    ctx = cls.setContext(4, 1, core=True)
    if ctx is not None:
        fmt = ctx.format()
        profile = fmt.profile()

        # First test: this is the context we hope for.
        if ((fmt.majorVersion() == 4 and fmt.minorVersion() >= 1)
                or (fmt.majorVersion() > 4)) and profile == QSurfaceFormat.CoreProfile:
            Logger.log(
                "d", "Yay, we got at least OpenGL 4.1 core: %s",
                cls.versionAsText(fmt.majorVersion(), fmt.minorVersion(),
                                  profile))

            # https://riverbankcomputing.com/pipermail/pyqt/2017-January/038640.html
            # PyQt currently only implements 2.0, 2.1 and 4.1Core.
            # If e.g. 4.5Core were detected and used here, PyQt would not be
            # able to handle it, so clamp the reported version to 4.1.
            major_version = 4
            minor_version = 1

            # CURA-6092: Check that we're not using a software backed 4.1
            # context; a software 4.1 context is much slower than a hardware
            # backed 2.0 context.
            gl_window = QWindow()
            gl_window.setSurfaceType(QWindow.OpenGLSurface)
            gl_window.showMinimized()

            gl_format = QSurfaceFormat()
            gl_format.setMajorVersion(major_version)
            gl_format.setMinorVersion(minor_version)
            gl_format.setProfile(profile)

            gl_context = QOpenGLContext()
            gl_context.setFormat(gl_format)
            gl_context.create()
            gl_context.makeCurrent(gl_window)

            gl_profile = QOpenGLVersionProfile()
            gl_profile.setVersion(major_version, minor_version)
            gl_profile.setProfile(profile)

            # It's actually a protected class in PyQt that depends on the
            # requested profile and the implementation of your graphics card.
            gl = gl_context.versionFunctions(gl_profile)  # type: Any

            gpu_type = "Unknown"  # type: str

            result = None
            if gl:
                result = gl.initializeOpenGLFunctions()

            if not result:
                Logger.log("e", "Could not initialize OpenGL to get gpu type")
            else:
                # WORKAROUND: Cura/#1117 Cura-packaging/12
                # Some Intel GPU chipsets return a string which is not
                # decodable via PyQt5. This workaround makes the code fall
                # back to the "Unknown" renderer in those cases.
                try:
                    gpu_type = gl.glGetString(gl.GL_RENDERER)  # type: str
                except UnicodeDecodeError:
                    Logger.log(
                        "e",
                        "DecodeError while getting GL_RENDERER via glGetString!"
                    )

                Logger.log("d",
                           "OpenGL renderer type for this OpenGL version: %s",
                           gpu_type)
                if "software" in gpu_type.lower():
                    # Software renderer: fall through to the 2.0 fallback below.
                    Logger.log(
                        "w",
                        "Unfortunately OpenGL 4.1 uses software rendering")
                else:
                    return major_version, minor_version, QSurfaceFormat.CoreProfile
    else:
        Logger.log("d", "Failed to create OpenGL context 4.1.")

    # Fallback: check the minimum spec (OpenGL 2.0, no profile).
    Logger.log("d", "Trying OpenGL context 2.0...")
    ctx = cls.setContext(2, 0, profile=QSurfaceFormat.NoProfile)
    if ctx is not None:
        fmt = ctx.format()
        profile = fmt.profile()
        if fmt.majorVersion() >= 2 and fmt.minorVersion() >= 0:
            Logger.log(
                "d", "We got at least OpenGL context 2.0: %s",
                cls.versionAsText(fmt.majorVersion(), fmt.minorVersion(),
                                  profile))
            return 2, 0, QSurfaceFormat.NoProfile
        else:
            Logger.log(
                "d", "Current OpenGL context is too low: %s" %
                cls.versionAsText(fmt.majorVersion(), fmt.minorVersion(),
                                  profile))
            return None, None, None
    else:
        Logger.log("d", "Failed to create OpenGL context 2.0.")
        return None, None, None
class OpenGLApp(QMainWindow):
    """Application main window hosting a single OpenGL widget."""

    def __init__(self, versionprofile=None, *args, **kwargs):
        """Create the central OpenGL widget and show the window.

        :param versionprofile: QOpenGLVersionProfile forwarded to the widget
        """
        super().__init__(*args, **kwargs)
        self.widget = QOpenGLControllerWidget(versionprofile=versionprofile)
        self.setMinimumSize(400, 400)
        self.setCentralWidget(self.widget)
        self.show()


if __name__ == '__main__':
    import sys

    # Request an OpenGL 4.1 core-profile default surface with 4x MSAA and
    # vsync disabled (swap interval 0), before any window is created.
    surface_format = QSurfaceFormat()
    surface_format.setVersion(4, 1)
    surface_format.setProfile(QSurfaceFormat.CoreProfile)
    surface_format.setSamples(4)
    surface_format.setSwapInterval(0)
    QSurfaceFormat.setDefaultFormat(surface_format)

    # Matching version profile handed to the GL widget for versionFunctions().
    version_profile = QOpenGLVersionProfile()
    version_profile.setVersion(4, 1)
    version_profile.setProfile(QSurfaceFormat.CoreProfile)

    application = QApplication(sys.argv)
    main_window = OpenGLApp(versionprofile=version_profile)
    main_window.show()
    sys.exit(application.exec_())
def __init__(self) -> None:
    """Initialize the OpenGL singleton: bind GL functions from the current
    Qt context, verify FrameBuffer Object support, and detect the GPU
    vendor/renderer/version strings. Exits the process on any hard failure.
    """
    # Enforce the singleton: only one OpenGL instance may ever be created.
    if OpenGL.__instance is not None:
        raise RuntimeError("Try to create singleton '%s' more than once" %
                           self.__class__.__name__)
    OpenGL.__instance = self

    super().__init__()

    # Build the version profile from the globally detected OpenGL version.
    profile = QOpenGLVersionProfile()
    profile.setVersion(OpenGLContext.major_version,
                       OpenGLContext.minor_version)
    profile.setProfile(OpenGLContext.profile)

    context = QOpenGLContext.currentContext()
    if not context:
        Logger.log(
            "e", "Startup failed due to OpenGL context creation failing")
        QMessageBox.critical(
            None,
            i18n_catalog.i18nc(
                "@message", "Failed to Initialize OpenGL",
                "Could not initialize an OpenGL context. This program requires OpenGL 2.0 or higher. Please check your video card drivers."
            ))
        sys.exit(1)

    # It's actually a protected class in PyQt that depends on the
    # implementation of your graphics card.
    self._gl = context.versionFunctions(profile)  # type: Any
    if not self._gl:
        Logger.log("e",
                   "Startup failed due to OpenGL initialization failing")
        QMessageBox.critical(
            None,
            i18n_catalog.i18nc(
                "@message", "Failed to Initialize OpenGL",
                "Could not initialize OpenGL. This program requires OpenGL 2.0 or higher. Please check your video card drivers."
            ))
        sys.exit(1)

    # It would be nice to be able to not necessarily need OpenGL FrameBuffer
    # Object support, but due to a limitation in PyQt, currently glReadPixels
    # or similar methods are not available. This means we can only get frame
    # buffer contents through methods that indirectly call those methods, in
    # this case primarily QOpenGLFrameBufferObject::toImage(), making us
    # hard-depend on FrameBuffer Objects.
    if not self.hasFrameBufferObjects():
        Logger.log(
            "e",
            "Startup failed, OpenGL does not support Frame Buffer Objects")
        # NOTE(review): unlike the two calls above, this i18nc call passes no
        # "@message" context argument — confirm whether that is intentional.
        QMessageBox.critical(
            None,
            i18n_catalog.i18nc(
                "Critical OpenGL Extensions Missing",
                "Critical OpenGL extensions are missing. This program requires support for Framebuffer Objects. Please check your video card drivers."
            ))
        sys.exit(1)

    self._gl.initializeOpenGLFunctions()

    # Classify the GPU vendor from GL_VENDOR via substring matching.
    self._gpu_vendor = OpenGL.Vendor.Other  # type: int
    vendor_string = self._gl.glGetString(self._gl.GL_VENDOR)
    if vendor_string is None:
        vendor_string = "Unknown"
    vendor_string = vendor_string.lower()
    if "nvidia" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.NVidia
    elif "amd" in vendor_string or "ati" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.AMD
    elif "intel" in vendor_string:
        self._gpu_vendor = OpenGL.Vendor.Intel

    self._gpu_type = "Unknown"  # type: str
    # WORKAROUND: Cura/#1117 Cura-packaging/12
    # Some Intel GPU chipsets return a string which is not decodable via
    # PyQt5. This workaround makes the code fall back to the "Unknown"
    # renderer in those cases.
    try:
        self._gpu_type = self._gl.glGetString(self._gl.GL_RENDERER)
    except UnicodeDecodeError:
        Logger.log(
            "e", "DecodeError while getting GL_RENDERER via glGetString!")

    self._opengl_version = self._gl.glGetString(
        self._gl.GL_VERSION)  # type: str

    # NOTE(review): this branch looks unreachable — a failed
    # hasFrameBufferObjects() check above already calls sys.exit(1), unless
    # that method's result can change between the two calls; confirm.
    if not self.hasFrameBufferObjects():
        Logger.log(
            "w", "No frame buffer support, falling back to texture copies.")

    Logger.log("d", "Initialized OpenGL subsystems.")
    Logger.log("d", "OpenGL Version: %s", self._opengl_version)
    Logger.log("d", "OpenGL Vendor: %s",
               self._gl.glGetString(self._gl.GL_VENDOR))
    Logger.log("d", "OpenGL Renderer: %s", self._gpu_type)