Example #1
    def __init__(self) -> None:
        if OpenGL.__instance is not None:
            raise RuntimeError("Try to create singleton '%s' more than once" % self.__class__.__name__)
        OpenGL.__instance = self

        super().__init__()

        profile = QOpenGLVersionProfile()
        profile.setVersion(OpenGLContext.major_version, OpenGLContext.minor_version)
        profile.setProfile(OpenGLContext.profile)

        self._gl = QOpenGLContext.currentContext().versionFunctions(profile) # type: Any #It's actually a protected class in PyQt that depends on the implementation of your graphics card.
        if not self._gl:
            Logger.log("e", "Startup failed due to OpenGL initialization failing")
            QMessageBox.critical(None, i18n_catalog.i18nc("@message", "Failed to Initialize OpenGL", "Could not initialize OpenGL. This program requires OpenGL 2.0 or higher. Please check your video card drivers."))
            sys.exit(1)

        # It would be nice to be able to not necessarily need OpenGL FrameBuffer Object support, but
        # due to a limitation in PyQt, currently glReadPixels or similar methods are not available.
        # This means we can only get frame buffer contents through methods that indirectly call
        # those methods, in this case primarily QOpenGLFrameBufferObject::toImage(), making us
        # hard-depend on FrameBuffer Objects.
        if not self.hasFrameBufferObjects():
            Logger.log("e", "Startup failed, OpenGL does not support Frame Buffer Objects")
            QMessageBox.critical(None, i18n_catalog.i18nc("Critical OpenGL Extensions Missing", "Critical OpenGL extensions are missing. This program requires support for Framebuffer Objects. Please check your video card drivers."))
            sys.exit(1)

        self._gl.initializeOpenGLFunctions()

        self._gpu_vendor = OpenGL.Vendor.Other #type: int
        vendor_string = self._gl.glGetString(self._gl.GL_VENDOR)
        if vendor_string is None:
            vendor_string = "Unknown"
        vendor_string = vendor_string.lower()

        if "nvidia" in vendor_string:
            self._gpu_vendor = OpenGL.Vendor.NVidia
        elif "amd" in vendor_string or "ati" in vendor_string:
            self._gpu_vendor = OpenGL.Vendor.AMD
        elif "intel" in vendor_string:
            self._gpu_vendor = OpenGL.Vendor.Intel

        # WORKAROUND: Cura/#1117 Cura-packaging/12
        # Some Intel GPU chipsets return a string that cannot be decoded via PyQt5.
        # This workaround makes the code fall back to an "Unknown" renderer in these cases.
        try:
            self._gpu_type = self._gl.glGetString(self._gl.GL_RENDERER) #type: str
        except UnicodeDecodeError:
            Logger.log("e", "DecodeError while getting GL_RENDERER via glGetString!")
            self._gpu_type = "Unknown" #type: str

        self._opengl_version = self._gl.glGetString(self._gl.GL_VERSION) #type: str

        if not self.hasFrameBufferObjects():
            Logger.log("w", "No frame buffer support, falling back to texture copies.")

        Logger.log("d", "Initialized OpenGL subsystems.")
        Logger.log("d", "OpenGL Version:  %s", self._opengl_version)
        Logger.log("d", "OpenGL Vendor:   %s", self._gl.glGetString(self._gl.GL_VENDOR))
        Logger.log("d", "OpenGL Renderer: %s", self._gpu_type)
Example #2
    def initializeGL(self):
        if not has_libGL:
            return

        profile = QOpenGLVersionProfile()
        profile.setVersion(4, 1)
        profile.setProfile(QSurfaceFormat.CoreProfile)

        glClearColor(0.85, 0.85, 0.85, 1.0)
        self.program = self.init_shaders()

        vertices, normals, tex_coords, faces, material = read_obj(
            self.obj_path)

        self.bounding_box = get_bounding_box(vertices)
        self.model_offset = -(self.bounding_box[0] + 0.5 *
                              (self.bounding_box[1] - self.bounding_box[0])
                              )  # Offset to move the model's center to (0, 0, 0)

        self.texture = load_texture(material)
        glEnable(GL_DEPTH_TEST)
        glEnable(GL_CULL_FACE)

        self.vertex_buf, self.index_buf = self.init_buffers(
            vertices, normals, tex_coords, faces)
        self.initialized = True
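The snippet above mixes Qt's version-profile setup with module-level PyOpenGL calls (glClearColor, glEnable, and the GL_* constants). Presumably the surrounding module imports them roughly as in the following sketch; the exact import list is an assumption, since it is not part of the excerpt.

    # PyOpenGL provides the module-level GL entry points used in initializeGL().
    from OpenGL.GL import glClearColor, glEnable, GL_DEPTH_TEST, GL_CULL_FACE
    # Qt supplies the version profile and surface format types.
    from PyQt5.QtGui import QOpenGLVersionProfile, QSurfaceFormat
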
Example #3
    def initializeGL(self):
        version_profile = QOpenGLVersionProfile()
        version_profile.setVersion(4, 1)
        version_profile.setProfile(QSurfaceFormat.CoreProfile)
        self.gl = self.context().versionFunctions(version_profile)
        if not self.gl:
            raise RuntimeError("unable to apply OpenGL version profile")
        self.gl.initializeOpenGLFunctions()

        self.open_file('settings.xml')

        self.point_cloud_render = PC.PointCloudRender()

        self.sensor_thread = RSXML.load('settings.xml')
        #self.sensor_thread = API.RealSenseThread(1, 'RealSenseThread')
        self.sensor_thread.connect(self.point_cloud_render)
        self.motor_control = MCXML.load('settings.xml')
        self.sensor_thread.motor_control = self.motor_control

        self.message_bot = MBXML.load('settings.xml')
        if self.message_bot is not None:
            self.message_bot.sensor = self.sensor_thread
            self.message_bot.start()
            self.sensor_thread.bot = self.message_bot

        self.sensor_thread.start()

        glClearColor(self.clear_color[0], self.clear_color[1],
                     self.clear_color[2], 0.0)
        glEnable(GL_DEPTH_TEST)
        glDepthFunc(GL_LESS)
        glEnable(GL_CULL_FACE)
Example #4
    def __init__(self):
        profile = QOpenGLVersionProfile()
        profile.setVersion(OpenGLContext.major_version,
                           OpenGLContext.minor_version)
        profile.setProfile(OpenGLContext.profile)

        self._gl = QOpenGLContext.currentContext().versionFunctions(profile)
        if not self._gl:
            Logger.log("e",
                       "Startup failed due to OpenGL initialization failing")
            QMessageBox.critical(
                None, "Failed to Initialize OpenGL",
                "Could not initialize OpenGL. This program requires OpenGL 2.0 or higher. Please check your video card drivers."
            )
            sys.exit(1)

        # It would be nice to be able to not necessarily need OpenGL Framebuffer Object support, but
        # due to a limitation in PyQt, currently glReadPixels or similar methods are not available.
        # This means we can only get frame buffer contents through methods that indirectly call
        # those methods, in this case primarily QOpenGLFramebufferObject::toImage(), making us
        # hard-depend on Framebuffer Objects.
        if not self.hasFrameBufferObjects():
            Logger.log(
                "e",
                "Starup failed, OpenGL does not support Frame Buffer Objects")
            QMessageBox.critical(
                None, "Critical OpenGL Extensions Missing",
                "Critical OpenGL extensions are missing. This program requires support for Framebuffer Objects. Please check your video card drivers."
            )
            sys.exit(1)

        self._gl.initializeOpenGLFunctions()

        self._gpu_vendor = OpenGL.Vendor.Other
        vendor_string = self._gl.glGetString(self._gl.GL_VENDOR)
        if vendor_string is None:
            vendor_string = "Unknown"
        vendor_string = vendor_string.lower()
        if "nvidia" in vendor_string:
            self._gpu_vendor = OpenGL.Vendor.NVidia
        elif "amd" in vendor_string or "ati" in vendor_string:
            self._gpu_vendor = OpenGL.Vendor.AMD
        elif "intel" in vendor_string:
            self._gpu_vendor = OpenGL.Vendor.Intel

        self._gpu_type = self._gl.glGetString(self._gl.GL_RENDERER)

        if not self.hasFrameBufferObjects():
            Logger.log(
                "w",
                "No frame buffer support, falling back to texture copies.")

        Logger.log("d", "Initialized OpenGL subsystems.")
        Logger.log("d", "OpenGL Version:  %s",
                   self._gl.glGetString(self._gl.GL_VERSION))
        Logger.log("d", "OpenGL Vendor:   %s",
                   self._gl.glGetString(self._gl.GL_VENDOR))
        Logger.log("d", "OpenGL Renderer: %s", self._gpu_type)
Example #5
    def initializeGL(self):
        profile = QOpenGLVersionProfile()
        profile.setVersion(4, 1)
        profile.setProfile(QSurfaceFormat.CoreProfile)

        glClearColor(0.85, 0.85, 0.85, 1.0)
        self.program = self.init_shaders()

        vertices, normals, tex_coords, faces, material = read_obj(self.obj_path)

        self.bounding_box = get_bounding_box(vertices)
        self.model_offset = -(self.bounding_box[0] + 0.5 * (self.bounding_box[1] - self.bounding_box[0])) # Offset to move the model's center to (0, 0, 0)

        self.texture = load_texture(material)
        glEnable(GL_DEPTH_TEST)
        glEnable(GL_CULL_FACE)

        self.vertex_buf, self.index_buf = self.init_buffers(vertices, normals, tex_coords, faces)
        self.initialized = True
Example #6
    def detectBestOpenGLVersion(
        cls, force_compatability: bool
    ) -> Tuple[Optional[int], Optional[int], Optional[int]]:
        """Return "best" OpenGL to use, 4.1 core or 2.0.

        result is <major_version>, <minor_version>, <profile>
        The version depends on what versions are supported in Qt (4.1 and 2.0) and what
        the GPU supports. If creating a context fails at all, (None, None, None) is returned
        Note that PyQt only supports 4.1, 2.1 and 2.0. Cura omits support for 2.1, so the
        only returned options are 4.1 and 2.0.
        """
        cls.detect_ogl_context = None
        if not force_compatability:
            Logger.log("d", "Trying OpenGL context 4.1...")
            cls.detect_ogl_context = cls.setContext(4, 1, core=True)
        if cls.detect_ogl_context is not None:
            fmt = cls.detect_ogl_context.format()
            profile = fmt.profile()

            # First test: we hope for this
            if ((fmt.majorVersion() == 4 and fmt.minorVersion() >= 1) or
                (fmt.majorVersion() >
                 4)) and profile == QSurfaceFormat.CoreProfile:
                Logger.log(
                    "d", "Yay, we got at least OpenGL 4.1 core: %s",
                    cls.versionAsText(fmt.majorVersion(), fmt.minorVersion(),
                                      profile))

                # https://riverbankcomputing.com/pipermail/pyqt/2017-January/038640.html
                # PyQt currently only implements 2.0, 2.1 and 4.1 Core.
                # If e.g. 4.5 Core were detected and used here, PyQt would not be able to handle it.
                major_version = 4
                minor_version = 1

                # CURA-6092: Check that we are not using a software-backed 4.1 context; a software 4.1 context
                # is much slower than a hardware-backed 2.0 context.
                # Check for the OS, since this only seems to happen on specific versions of Mac OSX and
                # the workaround (which involves the deletion of an OpenGL context) is a problem for some Intel drivers.
                if not Platform.isOSX():
                    return major_version, minor_version, QSurfaceFormat.CoreProfile

                gl_window = QWindow()
                gl_window.setSurfaceType(QWindow.OpenGLSurface)
                gl_window.showMinimized()

                cls.detect_ogl_context.makeCurrent(gl_window)

                gl_profile = QOpenGLVersionProfile()
                gl_profile.setVersion(major_version, minor_version)
                gl_profile.setProfile(profile)

                gl = cls.detect_ogl_context.versionFunctions(
                    gl_profile
                )  # type: Any #It's actually a protected class in PyQt that depends on the requested profile and the implementation of your graphics card.

                gpu_type = "Unknown"  # type: str

                result = None
                if gl:
                    result = gl.initializeOpenGLFunctions()

                if not result:
                    Logger.log("e",
                               "Could not initialize OpenGL to get gpu type")
                else:
                    # WORKAROUND: Cura/#1117 Cura-packaging/12
                    # Some Intel GPU chipsets return a string that cannot be decoded via PyQt5.
                    # This workaround makes the code fall back to an "Unknown" renderer in these cases.
                    try:
                        gpu_type = gl.glGetString(gl.GL_RENDERER)
                    except UnicodeDecodeError:
                        Logger.log(
                            "e",
                            "DecodeError while getting GL_RENDERER via glGetString!"
                        )

                Logger.log("d",
                           "OpenGL renderer type for this OpenGL version: %s",
                           gpu_type)
                if "software" in gpu_type.lower():
                    Logger.log(
                        "w",
                        "Unfortunately OpenGL 4.1 uses software rendering")
                else:
                    return major_version, minor_version, QSurfaceFormat.CoreProfile
        else:
            Logger.log("d", "Failed to create OpenGL context 4.1.")

        # Fallback: check min spec
        Logger.log("d", "Trying OpenGL context 2.0...")
        cls.detect_ogl_context = cls.setContext(
            2, 0, profile=QSurfaceFormat.NoProfile)
        if cls.detect_ogl_context is not None:
            fmt = cls.detect_ogl_context.format()
            profile = fmt.profile()

            if fmt.majorVersion() >= 2 and fmt.minorVersion() >= 0:
                Logger.log(
                    "d", "We got at least OpenGL context 2.0: %s",
                    cls.versionAsText(fmt.majorVersion(), fmt.minorVersion(),
                                      profile))
                return 2, 0, QSurfaceFormat.NoProfile
            else:
                Logger.log(
                    "d", "Current OpenGL context is too low: %s" %
                    cls.versionAsText(fmt.majorVersion(), fmt.minorVersion(),
                                      profile))
                return None, None, None
        else:
            Logger.log("d", "Failed to create OpenGL context 2.0.")
            return None, None, None
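Per the docstring, callers get back a (major, minor, profile) tuple, or (None, None, None) if no context could be created. The following is a hedged usage sketch that turns the result into the application-wide default surface format; the import of OpenGLContext and the exit handling are assumptions made only for illustration.

    import sys

    from PyQt5.QtGui import QSurfaceFormat

    # OpenGLContext is assumed to be importable from wherever the class above lives.
    major, minor, profile = OpenGLContext.detectBestOpenGLVersion(force_compatability=False)
    if major is None:
        sys.exit("Could not create a usable OpenGL context")

    fmt = QSurfaceFormat()
    fmt.setMajorVersion(major)
    fmt.setMinorVersion(minor)
    fmt.setProfile(profile)
    QSurfaceFormat.setDefaultFormat(fmt)  # set before the first OpenGL surface is created
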
Example #7
    def detectBestOpenGLVersion(cls):
        Logger.log("d", "Trying OpenGL context 4.1...")
        ctx = cls.setContext(4, 1, core=True)
        if ctx is not None:
            fmt = ctx.format()
            profile = fmt.profile()

            # First test: we hope for this
            if ((fmt.majorVersion() == 4 and fmt.minorVersion() >= 1) or
                (fmt.majorVersion() >
                 4)) and profile == QSurfaceFormat.CoreProfile:
                Logger.log(
                    "d", "Yay, we got at least OpenGL 4.1 core: %s",
                    cls.versionAsText(fmt.majorVersion(), fmt.minorVersion(),
                                      profile))

                # https://riverbankcomputing.com/pipermail/pyqt/2017-January/038640.html
                # PyQt currently only implements 2.0, 2.1 and 4.1 Core.
                # If e.g. 4.5 Core were detected and used here, PyQt would not be able to handle it.
                major_version = 4
                minor_version = 1

                # CURA-6092: Check that we are not using a software-backed 4.1 context; a software 4.1 context
                # is much slower than a hardware-backed 2.0 context.
                gl_window = QWindow()
                gl_window.setSurfaceType(QWindow.OpenGLSurface)
                gl_window.showMinimized()

                gl_format = QSurfaceFormat()
                gl_format.setMajorVersion(major_version)
                gl_format.setMinorVersion(minor_version)
                gl_format.setProfile(profile)

                gl_context = QOpenGLContext()
                gl_context.setFormat(gl_format)
                gl_context.create()
                gl_context.makeCurrent(gl_window)

                gl_profile = QOpenGLVersionProfile()
                gl_profile.setVersion(major_version, minor_version)
                gl_profile.setProfile(profile)

                gl = gl_context.versionFunctions(
                    gl_profile
                )  # type: Any #It's actually a protected class in PyQt that depends on the requested profile and the implementation of your graphics card.

                gpu_type = "Unknown"  # type: str

                result = None
                if gl:
                    result = gl.initializeOpenGLFunctions()

                if not result:
                    Logger.log("e",
                               "Could not initialize OpenGL to get gpu type")
                else:
                    # WORKAROUND: Cura/#1117 Cura-packaging/12
                    # Some Intel GPU chipsets return a string that cannot be decoded via PyQt5.
                    # This workaround makes the code fall back to an "Unknown" renderer in these cases.
                    try:
                        gpu_type = gl.glGetString(gl.GL_RENDERER)  #type: str
                    except UnicodeDecodeError:
                        Logger.log(
                            "e",
                            "DecodeError while getting GL_RENDERER via glGetString!"
                        )

                Logger.log("d",
                           "OpenGL renderer type for this OpenGL version: %s",
                           gpu_type)
                if "software" in gpu_type.lower():
                    Logger.log(
                        "w",
                        "Unfortunately OpenGL 4.1 uses software rendering")
                else:
                    return major_version, minor_version, QSurfaceFormat.CoreProfile
        else:
            Logger.log("d", "Failed to create OpenGL context 4.1.")

        # Fallback: check min spec
        Logger.log("d", "Trying OpenGL context 2.0...")
        ctx = cls.setContext(2, 0, profile=QSurfaceFormat.NoProfile)
        if ctx is not None:
            fmt = ctx.format()
            profile = fmt.profile()

            if fmt.majorVersion() >= 2 and fmt.minorVersion() >= 0:
                Logger.log(
                    "d", "We got at least OpenGL context 2.0: %s",
                    cls.versionAsText(fmt.majorVersion(), fmt.minorVersion(),
                                      profile))
                return 2, 0, QSurfaceFormat.NoProfile
            else:
                Logger.log(
                    "d", "Current OpenGL context is too low: %s" %
                    cls.versionAsText(fmt.majorVersion(), fmt.minorVersion(),
                                      profile))
                return None, None, None
        else:
            Logger.log("d", "Failed to create OpenGL context 2.0.")
            return None, None, None
Example #8
    def __init__(self) -> None:
        if OpenGL.__instance is not None:
            raise RuntimeError("Try to create singleton '%s' more than once" %
                               self.__class__.__name__)
        OpenGL.__instance = self

        super().__init__()

        profile = QOpenGLVersionProfile()
        profile.setVersion(OpenGLContext.major_version,
                           OpenGLContext.minor_version)
        profile.setProfile(OpenGLContext.profile)

        context = QOpenGLContext.currentContext()
        if not context:
            Logger.log(
                "e", "Startup failed due to OpenGL context creation failing")
            QMessageBox.critical(
                None,
                i18n_catalog.i18nc("@message", "Failed to Initialize OpenGL"),
                i18n_catalog.i18nc(
                    "@message",
                    "Could not initialize an OpenGL context. This program requires OpenGL 2.0 or higher. Please check your video card drivers."
                ))
            sys.exit(1)
        self._gl = context.versionFunctions(
            profile
        )  # type: Any #It's actually a protected class in PyQt that depends on the implementation of your graphics card.
        if not self._gl:
            Logger.log("e",
                       "Startup failed due to OpenGL initialization failing")
            QMessageBox.critical(
                None,
                i18n_catalog.i18nc("@message", "Failed to Initialize OpenGL"),
                i18n_catalog.i18nc(
                    "@message",
                    "Could not initialize OpenGL. This program requires OpenGL 2.0 or higher. Please check your video card drivers."
                ))
            sys.exit(1)

        # It would be nice to be able to not necessarily need OpenGL FrameBuffer Object support, but
        # due to a limitation in PyQt, currently glReadPixels or similar methods are not available.
        # This means we can only get frame buffer contents through methods that indirectly call
        # those methods, in this case primarily QOpenGLFrameBufferObject::toImage(), making us
        # hard-depend on FrameBuffer Objects.
        if not self.hasFrameBufferObjects():
            Logger.log(
                "e",
                "Startup failed, OpenGL does not support Frame Buffer Objects")
            QMessageBox.critical(
                None,
                i18n_catalog.i18nc("@message", "Critical OpenGL Extensions Missing"),
                i18n_catalog.i18nc(
                    "@message",
                    "Critical OpenGL extensions are missing. This program requires support for Framebuffer Objects. Please check your video card drivers."
                ))
            sys.exit(1)

        self._gl.initializeOpenGLFunctions()

        self._gpu_vendor = OpenGL.Vendor.Other  #type: int
        vendor_string = self._gl.glGetString(self._gl.GL_VENDOR)
        if vendor_string is None:
            vendor_string = "Unknown"
        vendor_string = vendor_string.lower()

        if "nvidia" in vendor_string:
            self._gpu_vendor = OpenGL.Vendor.NVidia
        elif "amd" in vendor_string or "ati" in vendor_string:
            self._gpu_vendor = OpenGL.Vendor.AMD
        elif "intel" in vendor_string:
            self._gpu_vendor = OpenGL.Vendor.Intel

        self._gpu_type = "Unknown"  # type: str
        # WORKAROUND: Cura/#1117 Cura-packaging/12
        # Some Intel GPU chipsets return a string that cannot be decoded via PyQt5.
        # This workaround makes the code fall back to an "Unknown" renderer in these cases.
        try:
            self._gpu_type = self._gl.glGetString(self._gl.GL_RENDERER)
        except UnicodeDecodeError:
            Logger.log(
                "e", "DecodeError while getting GL_RENDERER via glGetString!")

        self._opengl_version = self._gl.glGetString(
            self._gl.GL_VERSION)  #type: str

        if not self.hasFrameBufferObjects():
            Logger.log(
                "w",
                "No frame buffer support, falling back to texture copies.")

        Logger.log("d", "Initialized OpenGL subsystems.")
        Logger.log("d", "OpenGL Version:  %s", self._opengl_version)
        Logger.log("d", "OpenGL Vendor:   %s",
                   self._gl.glGetString(self._gl.GL_VENDOR))
        Logger.log("d", "OpenGL Renderer: %s", self._gpu_type)
Example #9
class OpenGLApp(QMainWindow):
    """Main window."""
    def __init__(self, versionprofile=None, *args, **kwargs):
        """Initialize with an OpenGL Widget."""
        super(OpenGLApp, self).__init__(*args, **kwargs)

        self.widget = QOpenGLControllerWidget(versionprofile=versionprofile)
        self.setMinimumSize(400, 400)
        self.setCentralWidget(self.widget)
        self.show()


if __name__ == '__main__':
    import sys

    fmt = QSurfaceFormat()
    fmt.setVersion(4, 1)
    fmt.setProfile(QSurfaceFormat.CoreProfile)
    fmt.setSamples(4)
    fmt.setSwapInterval(0)
    QSurfaceFormat.setDefaultFormat(fmt)

    vp = QOpenGLVersionProfile()
    vp.setVersion(4, 1)
    vp.setProfile(QSurfaceFormat.CoreProfile)

    app = QApplication(sys.argv)
    window = OpenGLApp(versionprofile=vp)
    window.show()
    sys.exit(app.exec_())
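
QOpenGLControllerWidget is not defined in this excerpt. Below is a minimal, hypothetical sketch of such a widget, resolving the passed-in version profile to version functions in initializeGL(); it only illustrates how the versionprofile argument could be consumed.

    from PyQt5.QtWidgets import QOpenGLWidget

    class QOpenGLControllerWidget(QOpenGLWidget):
        """Hypothetical widget that resolves the given QOpenGLVersionProfile."""

        def __init__(self, versionprofile=None, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.versionprofile = versionprofile
            self.gl = None

        def initializeGL(self):
            # Resolve the version functions for the requested profile on this widget's context.
            self.gl = self.context().versionFunctions(self.versionprofile)
            if not self.gl:
                raise RuntimeError("unable to apply OpenGL version profile")
            self.gl.initializeOpenGLFunctions()
            self.gl.glClearColor(0.2, 0.2, 0.2, 1.0)

        def paintGL(self):
            self.gl.glClear(self.gl.GL_COLOR_BUFFER_BIT | self.gl.GL_DEPTH_BUFFER_BIT)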