def _create_common_defines(self):
        """ Creates commonly used defines for the shader auto config """
        defines = self.stage_mgr.defines
        gsg = self._showbase.win.get_gsg()

        # Size of the 3D viewport
        defines["WINDOW_WIDTH"] = Globals.resolution.x
        defines["WINDOW_HEIGHT"] = Globals.resolution.y

        # Physical window size - can differ from the viewport when
        # supersampling is active
        defines["NATIVE_WINDOW_WIDTH"] = Globals.base.win.get_x_size()
        defines["NATIVE_WINDOW_HEIGHT"] = Globals.base.win.get_y_size()

        # Camera near and far plane, rounded so the generated define is stable
        defines["CAMERA_NEAR"] = round(Globals.base.camLens.get_near(), 10)
        defines["CAMERA_FAR"] = round(Globals.base.camLens.get_far(), 10)

        # Workaround for a buggy nvidia driver which expects arrays to be const
        buggy_driver = "NVIDIA 361.43" in gsg.get_driver_version()
        defines["CONST_ARRAY"] = "const" if buggy_driver else ""

        # Expose the driver vendor as defines (several may match)
        vendor = gsg.get_driver_vendor().lower()
        for pattern, define_name in (("nvidia", "IS_NVIDIA"),
                                     ("ati", "IS_AMD"),
                                     ("intel", "IS_INTEL")):
            if pattern in vendor:
                defines[define_name] = 1

        defines["REFERENCE_MODE"] = self.settings["pipeline.reference_mode"]

        # Only activate this experimental feature if the patch was applied,
        # since it is a local change in my Panda3D build which is not yet
        # reviewed by rdb. Once it is in public Panda3D Dev-Builds this will
        # be the default.
        if (not isfile(
                "/$$rp/data/panda3d_patches/prev-model-view-matrix.diff")
                or isfile("D:/__dev__")):

            # You can find the required patch in
            # data/panda3d_patches/prev-model-view-matrix.diff.
            # Delete it after you applied it, so the render pipeline knows the
            # patch is available.
            # self.warn("Experimental feature activated, no guarantee it works!")
            # defines["EXPERIMENTAL_PREV_TRANSFORM"] = 1
            pass

        # Let the other managers contribute their own defines
        self.light_mgr.init_defines()
        self.plugin_mgr.init_defines()
    def _create_common_defines(self):
        """ Creates commonly used defines for the shader auto config.

        NOTE(review): this is a duplicate of an earlier, nearly identical
        _create_common_defines in this file - the later definition silently
        shadows the earlier one.
        """
        # Shared define dictionary consumed by the shader auto config
        defines = self.stage_mgr.defines

        # 3D viewport size
        defines["WINDOW_WIDTH"] = Globals.resolution.x
        defines["WINDOW_HEIGHT"] = Globals.resolution.y

        # Actual window size - might differ for supersampling
        defines["NATIVE_WINDOW_WIDTH"] = Globals.base.win.get_x_size()
        defines["NATIVE_WINDOW_HEIGHT"] = Globals.base.win.get_y_size()

        # Pass camera near and far plane (rounded so the define text is stable)
        defines["CAMERA_NEAR"] = round(Globals.base.camLens.get_near(), 10)
        defines["CAMERA_FAR"] = round(Globals.base.camLens.get_far(), 10)

        # Work arround buggy nvidia driver, which expects arrays to be const
        if "NVIDIA 361.43" in self._showbase.win.get_gsg().get_driver_version():
            defines["CONST_ARRAY"] = "const"
        else:
            defines["CONST_ARRAY"] = ""

        # Provide driver vendor as a default (checks are not mutually exclusive)
        vendor = self._showbase.win.get_gsg().get_driver_vendor().lower()
        if "nvidia" in vendor:
            defines["IS_NVIDIA"] = 1
        if "ati" in vendor:
            defines["IS_AMD"] = 1
        if "intel" in vendor:
            defines["IS_INTEL"] = 1

        defines["REFERENCE_MODE"] = self.settings["pipeline.reference_mode"]

        # Only activate this experimental feature if the patch was applied,
        # since it is a local change in my Panda3D build which is not yet
        # reviewed by rdb. Once it is in public Panda3D Dev-Builds this will
        # be the default.
        if (not isfile("/$$rp/data/panda3d_patches/prev-model-view-matrix.diff") or
                isfile("D:/__dev__")):

            # You can find the required patch in
            # data/panda3d_patches/prev-model-view-matrix.diff.
            # Delete it after you applied it, so the render pipeline knows the
            # patch is available.
            # self.warn("Experimental feature activated, no guarantee it works!")
            # defines["EXPERIMENTAL_PREV_TRANSFORM"] = 1
            pass

        # Let the other managers contribute their own defines
        self.light_mgr.init_defines()
        self.plugin_mgr.init_defines()
    def _on_exit_cleanup(self):
        """ Gets called when the application exits. Removes the lockfile and
        all pipeline tempfiles from the write path, and deletes the write
        path itself when it ended up empty. """

        if self._do_cleanup:
            self.debug("Cleaning up ..")

            if self._write_path is not None:

                # Try removing the lockfile
                self._try_remove(self._lock_file)

                # Check for further tempfiles in the write path
                # We explicitely use os.listdir here instead of pandas listdir,
                # to work with actual paths
                for fname in os.listdir(self._write_path):
                    pth = join(self._write_path, fname)

                    # Tempfiles from the pipeline start with "$$" to distinguish
                    # them from user created files
                    if isfile(pth) and fname.startswith("$$"):
                        self._try_remove(pth)

                # Delete the write path if no files are left
                if len(os.listdir(self._write_path)) < 1:
                    try:
                        os.removedirs(self._write_path)
                    except OSError:
                        # Fixed: os.removedirs raises OSError, not IOError -
                        # on Python 2 the old "except IOError" never caught it.
                        pass
    def load_from_file(self, filename):
        """ Attempts to load settings from a given file. Returns False (and
        prints an error) when the file does not exist or has no root entry,
        otherwise returns True. """

        self.debug("Loading settings-file from", filename)

        if not isfile(filename):
            self.error("File not found:", filename)
            # Fixed: return False instead of an implicit None so every error
            # path reports failure consistently
            return False

        # Load actual settings file
        parsed_yaml = YAMLEasyLoad(filename)
        self._file_loaded = True

        if "settings" not in parsed_yaml:
            self.error("Missing root entry in settings file:", filename)
            return False

        # Flatten the recursive structure into a single dictionary, joining
        # nested keys with dots (e.g. "pipeline.reference_mode")
        def flatten_and_insert(root, prefix):
            for key, val in iteritems(root):
                if isinstance(val, dict):
                    flatten_and_insert(val, prefix + key + ".")
                else:
                    self._settings[prefix + key] = val

        flatten_and_insert(parsed_yaml["settings"], "")
        self.debug("Loaded", len(self._settings), "settings")
        return True
    def create(self):
        """ This creates the pipeline, and setups all buffers. It also
        constructs the showbase. The settings should have been loaded before
        calling this, and also the base and write path should have been
        initialized properly (see MountManager). """

        # Used only to report the total initialization time at the end
        start_time = time.time()

        if not self._mount_mgr.is_mounted:
            self.debug("Mount manager was not mounted, mounting now ...")
            self._mount_mgr.mount()

        if not self._settings.is_file_loaded():
            self.debug("No settings loaded, loading from default location")
            self._settings.load_from_file("$$Config/pipeline.yaml")

        # Check if the pipeline was properly installed, before including anything else
        if not isfile("Data/install.flag"):
            DebugObject.global_error("CORE", "You didn't setup the pipeline yet! Please run setup.py.")
            sys.exit(1)

        # Load the default prc config
        load_prc_file("$$Config/configuration.prc")

        # Construct the showbase and init global variables
        ShowBase.__init__(self._showbase)
        self._init_globals()

        # Create the loading screen
        self._loading_screen.create()
        self._adjust_camera_settings()
        self._create_managers()

        # Init the onscreen debugger
        self._init_debugger()

        # Load plugins and daytime settings
        self._plugin_mgr.load_plugins()
        self._daytime_mgr.load_settings()
        self._com_resources.write_config()

        # Setup common defines
        self._create_common_defines()

        # Let the plugins setup their stages
        self._plugin_mgr.trigger_hook("on_stage_setup")
        self._setup_managers()
        self._plugin_mgr.trigger_hook("on_pipeline_created")

        # Set the default effect on render
        self.set_effect(Globals.render, "Effects/Default.yaml", {}, -10)

        # Hide the loading screen
        self._loading_screen.remove()

        self._start_listener()

        # Measure how long it took to initialize everything
        init_duration = int((time.time() - start_time) * 1000.0)
        self.debug("Finished initialization in {} ms".format(init_duration))
    def pre_showbase_init(self):
        """ Setups all required pipeline settings and configuration which have
        to be set before the showbase is setup. This is called by create(),
        in case the showbase was not initialized, however you can (and have to)
        call it manually before you init your custom showbase instance.
        See the 00-Loading the pipeline sample for more information."""

        if not self.mount_mgr.is_mounted:
            self.debug("Mount manager was not mounted, mounting now ...")
            self.mount_mgr.mount()

        if not self.settings:
            self.debug("No settings loaded, loading from default location")
            self.load_settings("/$$rpconfig/pipeline.yaml")

        # Check if the pipeline was properly installed, before including anything else
        if not isfile("/$$rp/data/install.flag"):
            self.fatal(
                "You didn't setup the pipeline yet! Please run setup.py.")

        # Load the default prc config
        load_prc_file("/$$rpconfig/panda3d-config.prc")

        # Set the initialization flag so create() knows it can skip this step
        self._pre_showbase_initialized = True
# --- Exemple #7 (scraper artifact, score: 0) ---
    def get_lock(self):
        """ Checks if we are the only instance running. If there is no instance
        running, write the current PID to the instance.pid file. If the
        instance file exists, checks if the specified process still runs. This
        way only 1 instance of the pipeline can be run at one time. """

        # No lockfile at all - we are the first instance, claim the lock
        if not isfile(self._lock_file):
            self._write_lock()
            return True

        # A lockfile exists - read the process id recorded in it
        try:
            with open(self._lock_file, "r") as handle:
                pid = int(handle.readline())
        except IOError as msg:
            self.error("Failed to read lockfile:", msg)
            return False

        # Refuse to start while the recorded process is still alive
        if self._is_pid_running(pid):
            self.error("Found running instance")
            return False

        # Stale lockfile - the previous owner died, take over the lock
        self._write_lock()
        return True
# --- Exemple #8 (scraper artifact, score: 0) ---
    def _on_exit_cleanup(self):
        """ Gets called when the application exits. Removes the lockfile and
        all pipeline tempfiles from the write path, and deletes the write
        path itself when it ended up empty. """

        if self._do_cleanup:
            self.debug("Cleaning up ..")

            if self._write_path is not None:

                # Try removing the lockfile
                self._try_remove(self._lock_file)

                # Check for further tempfiles in the write path
                # We explicitely use os.listdir here instead of pandas listdir,
                # to work with actual paths
                for fname in os.listdir(self._write_path):
                    pth = join(self._write_path, fname)

                    # Tempfiles from the pipeline start with "$$" to distinguish
                    # them from user created files
                    if isfile(pth) and fname.startswith("$$"):
                        self._try_remove(pth)

                # Delete the write path if no files are left
                if len(os.listdir(self._write_path)) < 1:
                    try:
                        os.removedirs(self._write_path)
                    except OSError:
                        # Fixed: os.removedirs raises OSError, not IOError -
                        # on Python 2 the old "except IOError" never caught it.
                        pass
    def get_lock(self):
        """ Checks if we are the only instance running. If there is no instance
        running, write the current PID to the instance.pid file. If the
        instance file exists, checks if the specified process still runs. This
        way only 1 instance of the pipeline can be run at one time.

        NOTE(review): duplicate of an identical get_lock earlier in this file.
        """

        # Check if there is a lockfile at all
        if isfile(self._lock_file):
            # Read process id from lockfile
            try:
                with open(self._lock_file, "r") as handle:
                    pid = int(handle.readline())
            except IOError as msg:
                self.error("Failed to read lockfile:", msg)
                return False

            # Check if the process is still running
            if self._is_pid_running(pid):
                self.error("Found running instance")
                return False

            # Process is not running anymore, we can write the lockfile
            self._write_lock()
            return True

        else:
            # When there is no lockfile, just create it and continue
            self._write_lock()
            return True
    def load_plugin_config(self):
        """ Loads the plugin configuration from the pipeline Config directory,
        and gets the list of enabled plugins and settings from that. """
        plugin_cfg = "$$Config/plugins.yaml"

        if not isfile(plugin_cfg):
            raise PluginConfigError("Could not find plugin config at " + plugin_cfg)

        content = YAMLEasyLoad(plugin_cfg)

        if content is None:
            raise PluginConfigError("Plugin config is empty!")

        # Both top-level keys have to be present in the yaml file
        for required_key in ("enabled", "overrides"):
            if required_key not in content:
                raise PluginConfigError(
                    "Could not find key '" + required_key + "' in plugin config")

        # An empty yaml section parses as a falsy value - substitute empty
        # containers in that case
        self._enabled_plugins = content["enabled"] or []
        self._overrides = content["overrides"] or {}
# --- Exemple #11 (scraper artifact, score: 0) ---
    def startConvert(self):

        filename = str(self.ipt_source.text())
        self.btn_showResult.setEnabled(False)

        if len(filename) < 1 or not isfile(filename):
            QtGui.QMessageBox.warning(
                self, "Voxelizer",
                "You have to select a valid source file first!")
            return

        parentDir = "/".join(filename.split("/")[:-1])
        destination = join(parentDir, "voxelized")
        print "ParentDir:", parentDir

        voxelGridSize = 32
        if self.chb_gridSize16.isChecked():
            voxelGridSize = 16
        elif self.chb_gridSize32.isChecked():
            voxelGridSize = 32
        elif self.chb_gridSize64.isChecked():
            voxelGridSize = 64
        elif self.chb_gridSize128.isChecked():
            voxelGridSize = 128
        elif self.chb_gridSize256.isChecked():
            voxelGridSize = 256
        elif self.chb_gridSize512.isChecked():
            voxelGridSize = 512

        borderSize = float(self.box_borderSize.value())
        self.clearLog()
        self.addLog("Starting to convert ..")
        self.processStatus.setValue(0)

        result = False

        if True:
            # try:
            result = self.showbase.voxelize(
                filename,
                parentDir,
                destination, {
                    "gridResolution": voxelGridSize,
                    "border": borderSize,
                },
                logCallback=self._progressCallback)
        # except Exception, msg:
        if False:
            self.addLog("Fatal error during conversion process!")
            self.addLog("Message: " + str(msg))

        self.processStatus.setValue(0)

        if not result:
            self.addLog("Error: Voxelizer returned non-success statuscode!")
        else:
            self.btn_showResult.setEnabled(True)
            self.lastResultData = (filename, destination)
 def _create_shaders(self):
     """ Creates all the shaders used for precomputing """
     resource_path = self._handle.get_shader_resource("eric_bruneton")
     # Map each "<name>.compute.glsl" file to a compiled compute shader,
     # keyed by the part of the filename before the first dot
     self._shaders = {
         fname.split(".")[0]: Shader.load_compute(Shader.SL_GLSL,
                                                  join(resource_path, fname))
         for fname in listdir(resource_path)
         if isfile(join(resource_path, fname))
         and fname.endswith(".compute.glsl")
     }
 def create_shaders(self):
     """ Creates all the shaders used for precomputing.

     NOTE(review): near-duplicate of _create_shaders above, differing only
     in attribute names and in loading via RPLoader instead of Shader.
     """
     self.shaders = {}
     resource_path = self.handle.get_shader_resource("eric_bruneton")
     for fname in listdir(resource_path):
         fpath = join(resource_path, fname)
         # Only pick up compute shader sources
         if isfile(fpath) and fname.endswith(".compute.glsl"):
             # Key is the filename part before the first dot
             shader_name = fname.split(".")[0]
             shader_obj = RPLoader.load_shader(fpath)
             self.shaders[shader_name] = shader_obj
    def startConvert(self):
        """ Reads the UI state (source file, grid size, border size) and runs
        the voxelizer on the selected file, logging progress to the UI.

        NOTE(review): duplicate of an earlier startConvert in this file.
        """

        filename = str(self.ipt_source.text())
        self.btn_showResult.setEnabled(False)

        if len(filename) < 1 or not isfile(filename):
            QtGui.QMessageBox.warning(
                self, "Voxelizer", "You have to select a valid source file first!")
            return

        parentDir = "/".join(filename.split("/")[:-1])
        destination = join(parentDir, "voxelized")
        print "ParentDir:", parentDir

        # Map the checked radio button to a grid resolution (default 32)
        voxelGridSize = 32
        if self.chb_gridSize16.isChecked():
            voxelGridSize = 16
        elif self.chb_gridSize32.isChecked():
            voxelGridSize = 32
        elif self.chb_gridSize64.isChecked():
            voxelGridSize = 64
        elif self.chb_gridSize128.isChecked():
            voxelGridSize = 128
        elif self.chb_gridSize256.isChecked():
            voxelGridSize = 256
        elif self.chb_gridSize512.isChecked():
            voxelGridSize = 512

        borderSize = float(self.box_borderSize.value())
        self.clearLog()
        self.addLog("Starting to convert ..")
        self.processStatus.setValue(0)

        result = False

        # NOTE(review): the "if True:" / "if False:" pair below is a
        # commented-out try/except - the False branch is dead code and
        # references the undefined name "msg"; failures are never reported.
        if True:
        # try:
            result = self.showbase.voxelize(filename, parentDir, destination, {
                "gridResolution": voxelGridSize,
                "border": borderSize,
            }, logCallback=self._progressCallback)
        # except Exception, msg:
        if False:
            self.addLog("Fatal error during conversion process!")
            self.addLog("Message: " + str(msg))

        self.processStatus.setValue(0)

        if not result:
            self.addLog("Error: Voxelizer returned non-success statuscode!")
        else:
            self.btn_showResult.setEnabled(True)
            self.lastResultData = (filename, destination)
    def _handleIncludes(self, source):
        """ Internal (recursive) method to parse #include's.

        Reads <source>, replaces every '#include "file"' line with the
        (recursively expanded) content of that file, and returns the
        resulting text. Recursive includes are detected via
        self._GlobalIncludeStack and silently skipped. (Python 2 code.)
        """

        with open(source, "r") as handle:
            content = handle.readlines()

        newContent = ""
        includeIdentifier = "#include "

        # Iterate through lines
        for line_idx, line in enumerate(content):
            lineStrip = line.strip()
            if lineStrip.startswith(includeIdentifier):
                # Everything after "#include ", e.g. '"some/file.glsl"'
                includePart = lineStrip[len(includeIdentifier):].strip()

                # Filename is surrounded by braces
                # Todo: maybe also support ->'<- additionally to ->"<-
                if includePart.startswith('"') and includePart.endswith('"'):

                    # Special case
                    if includePart == '"%ShaderAutoConfig%"':
                        properIncludePart = "PipelineTemp/ShaderAutoConfig.include"
                    else:
                        # Extract include part (strip the quotes and resolve
                        # relative to the global shader path)
                        properIncludePart = Filename.fromOsSpecific(join(
                            self._GlobalShaderPath, includePart[1:-1])).toOsGeneric()

                    # And check if file exists
                    if isfile(properIncludePart):

                        # Check for recursive includes
                        if properIncludePart in self._GlobalIncludeStack:
                            # print "BetterShader: Ignoring recursive
                            # include:",properIncludePart
                            pass

                        else:
                            self._GlobalIncludeStack.append(properIncludePart)
                            # Emit a marker comment, then the expanded file
                            newContent += "\n// FILE: '" + \
                                str(properIncludePart) + "' \n"
                            newContent += self._handleIncludes(
                                properIncludePart).strip() + "\n"
                    else:
                        print "BetterShader: Failed to load '" + str(properIncludePart) + "'!"
                else:
                    print "BetterShader: Invalid include:", includePart

                # The #include line itself is consumed, not copied through
                continue

            newContent += line.rstrip() + "\n"

        return newContent
    def _load_overrides(self):
        """ Loads the overrides from the daytime config file. Returns False
        when the config is missing or malformed, True otherwise. """
        cfg_file = "$$Config/daytime.yaml"

        if not isfile(cfg_file):
            self.error("Could not load daytime overrides, file not found: ", cfg_file)
            return False

        yaml = YAMLEasyLoad(cfg_file)

        if "control_points" not in yaml:
            self.error("Root entry 'control_points' not found in daytime settings!")
            return False

        self._load_control_points(yaml["control_points"])
        # Fixed: explicitly signal success - previously the method fell off
        # the end and returned None, indistinguishable from failure for any
        # caller doing a truthiness check.
        return True
    def loadFromFile(self, filename):
        """ Attempts to load settings from a given file. When the file
        does not exist, nothing happens, and an error is printed """

        self.debug("Loading ini-file from", filename)

        if not isfile(filename):
            self.error("File not found:", filename)
            return

        # Use a context manager so the handle is closed even on errors
        with open(filename, "r") as handle:
            content = handle.readlines()

        # Set to default settings
        for name, setting in self.settings.iteritems():
            setting.setValue(setting.default)
            setattr(self, name, setting.default)

        # Read new settings
        for line in content:
            line = line.strip()

            # Empty line, comment, or section header.
            # Fixed: the old check compared line[0] (a single character)
            # against the two-character string "//", which could never match,
            # so "//"-style comments were not skipped.
            if not line or line.startswith(("//", "#", "[")):
                continue

            # No assignment
            if "=" not in line:
                self.warn("Ignoring invalid line:", line)
                continue

            parts = line.split("=")
            settingName = parts[0].strip()
            settingValue = ""

            if len(parts) > 1:
                settingValue = parts[1].strip()

            if settingName not in self.settings:
                self.warn("Unrecognized setting:", settingName)
                continue

            self.settings[settingName].setValue(settingValue)
            setattr(self, settingName, self.settings[settingName].getValue())
    def loadFromFile(self, filename):
        """ Attempts to load settings from a given file. When the file
        does not exist, nothing happens, and an error is printed.

        NOTE(review): duplicate of an identical loadFromFile earlier in
        this file.
        """

        self.debug("Loading ini-file from", filename)

        if not isfile(filename):
            self.error("File not found:", filename)
            return

        handle = open(filename, "r")
        content = handle.readlines()
        handle.close()

        # Set to default settings
        for name, setting in self.settings.iteritems():
            setting.setValue(setting.default)
            setattr(self, name, setting.default)

        # Read new settings

        for line in content:
            line = line.strip()

            # Empty line, comment, or section
            # NOTE(review): line[0] is a single character, so it can never
            # equal the two-character "//" - lines starting with "//" are NOT
            # skipped here and fall through to the "=" check below.
            if len(line) < 1 or line[0] in ["//", "#", "["]:
                continue

            # No assignment
            if "=" not in line:
                self.warn("Ignoring invalid line:", line)
                continue

            parts = line.split("=")
            settingName = parts[0].strip()
            settingValue = ""

            if len(parts) > 1:
                settingValue = parts[1].strip()

            if settingName not in self.settings:
                self.warn("Unrecognized setting:", settingName)
                continue

            self.settings[settingName].setValue(settingValue)
            setattr(self, settingName, self.settings[settingName].getValue())
    def load(self):
        """ Loads the scattering method """
        lut_src = self._handle.get_resource(
            "HosekWilkieScattering/ScatteringLUT.png")

        if not isfile(lut_src):
            self.error("Could not find precompiled LUT for the Hosek Wilkie "
                       "Scattering! Make sure you compiled the algorithm code!")
            return

        # Load the precompiled lookup table (dimensions 512 x 128, 100 slices
        # - presumably depth; confirm against SliceLoader)
        lut_tex = SliceLoader.load_3d_texture(lut_src, 512, 128, 100)

        # Repeat along u, clamp along v and w, sample with linear filtering
        lut_tex.set_wrap_u(Texture.WM_repeat)
        for clamp_axis in (lut_tex.set_wrap_v, lut_tex.set_wrap_w):
            clamp_axis(Texture.WM_clamp)
        for set_filter in (lut_tex.set_minfilter, lut_tex.set_magfilter):
            set_filter(Texture.FT_linear)
        lut_tex.set_format(Texture.F_rgb16)

        self._handle._display_stage.set_shader_input("ScatteringLUT", lut_tex)
# --- Exemple #20 (scraper artifact, score: 0) ---
    def load(self, filename):
        """ Loads a profile from a given filename and returns the internal
        used index which can be assigned to a light. Returns -1 on error. """

        # Make sure the user can load profiles directly from the ies profile folder
        data_path = join("/$$rp/rpcore/data/ies_profiles/", filename)
        if isfile(data_path):
            filename = data_path

        # Make filename unique by resolving it through the virtual file system
        fname = Filename.from_os_specific(filename)
        if not VirtualFileSystem.get_global_ptr().resolve_filename(
                fname,
                get_model_path().get_value(), "ies"):
            self.error("Could not resolve", filename)
            return -1
        fname = fname.get_fullpath()

        # Check for cache entries - an already-loaded profile keeps its
        # previously assigned index
        if fname in self._entries:
            return self._entries.index(fname)

        # Check for out of bounds
        if len(self._entries) >= self._max_entries:
            # TODO: Could remove unused profiles here or regenerate texture
            # NOTE(review): this only warns and does not return, so the code
            # below still writes an entry past the configured maximum -
            # confirm whether a "return -1" is intended here.
            self.warn(
                "Cannot load IES Profile, too many loaded! (Maximum: 32)")

        # Try loading the dataset, and see what happes
        try:
            dataset = self._load_and_parse_file(fname)
        except InvalidIESProfileException as msg:
            self.warn("Failed to load profile from", filename, ":", msg)
            return -1

        if not dataset:
            return -1

        # Dataset was loaded successfully, now copy it into the next slot of
        # the storage texture
        dataset.generate_dataset_texture_into(self._storage_tex,
                                              len(self._entries))
        self._entries.append(fname)

        return len(self._entries) - 1
    def pre_showbase_init(self):
        """ Setups all required pipeline settings and configuration which have
        to be set before the showbase is setup. This is called by create(),
        in case the showbase was not initialized, however you can (and have to)
        call it manually before you init your custom showbase instance.
        See the 00-Loading the pipeline sample for more information.

        NOTE(review): duplicate of an earlier pre_showbase_init in this file.
        """
        if not self.mount_mgr.is_mounted:
            self.debug("Mount manager was not mounted, mounting now ...")
            self.mount_mgr.mount()

        if not self.settings:
            self.debug("No settings loaded, loading from default location")
            self.load_settings("/$$rpconfig/pipeline.yaml")

        # Refuse to continue when the pipeline was never installed
        if not isfile("/$$rp/data/install.flag"):
            self.fatal("You didn't setup the pipeline yet! Please run setup.py.")

        # Load the default prc config
        load_prc_file("/$$rpconfig/panda3d-config.prc")
        # Set the initialization flag so create() knows it can skip this step
        self._pre_showbase_initialized = True
# --- Exemple #22 (scraper artifact, score: 0) ---
    def load(self, filename):
        """ Loads the property values from <filename>. Returns False when the
        file does not exist; malformed lines are warned about and skipped. """

        self.debug("Loading from", filename)

        if not isfile(filename):
            self.error("Could not load", filename)
            return False

        with open(filename, "r") as handle:
            content = handle.readlines()

        for line in content:
            line = line.strip()
            # Skip blank lines and comments
            if len(line) < 1 or line.startswith("#"):
                continue
            parts = line.split()

            # Each line has to have the form "<id> <data>"
            if len(parts) != 2:
                self.warn("Invalid line:", line)
                continue

            propId = parts[0]
            propData = parts[1]

            if propId not in self.properties:
                self.warn("Invalid ID:", propId)
                continue

            prop = self.properties[propId]

            # Data has to look like "[v0;v1;...;v7]"
            if not (propData.startswith("[") and propData.endswith("]")):
                self.warn("Invalid data:", propData)
                # Fixed: skip malformed entries - previously the code only
                # warned and then still attempted to parse the invalid data.
                continue

            propData = propData[1:-1].split(";")
            propData = [prop.propType.convertString(i) for i in propData]

            if len(propData) != 8:
                self.warn("Data count does not match for", propId)
                continue

            prop.values = propData
            prop.recompute()
# --- Exemple #23 (scraper artifact, score: 0) ---
    def cleanup(self):
        """ Gets called when the application exists """

        self.debug("Cleaning up ..")

        # Remove the lockfile and the generated shader auto config
        for leftover in (self.lockFile,
                         join(self.writePath, "ShaderAutoConfig.include")):
            self._tryRemove(leftover)

        # Check for further tempfiles in the write path
        for fname in os.listdir(self.writePath):
            pth = join(self.writePath, fname)

            # Tempfiles from the pipeline start with "$$" to avoid removing
            # user-created files.
            if isfile(pth) and fname.startswith("$$"):
                self._tryRemove(pth)
    def load(self, filename):
        """ Loads the property values from <filename>. Returns False when the
        file does not exist; malformed lines are warned about and skipped.

        NOTE(review): duplicate of an identical load earlier in this file.
        """

        self.debug("Loading from", filename)

        if not isfile(filename):
            self.error("Could not load", filename)
            return False

        with open(filename, "r") as handle:
            content = handle.readlines()

        for line in content:
            line = line.strip()
            # Skip blank lines and comments
            if len(line) < 1 or line.startswith("#"):
                continue
            parts = line.split()

            # Each line has to have the form "<id> <data>"
            if len(parts) != 2:
                self.warn("Invalid line:", line)
                continue

            propId = parts[0]
            propData = parts[1]

            if propId not in self.properties:
                self.warn("Invalid ID:", propId)
                continue

            prop = self.properties[propId]

            # Data has to look like "[v0;v1;...;v7]"
            # NOTE(review): this branch only warns and does not continue, so
            # the invalid string is still sliced and parsed below - confirm
            # whether a "continue" is intended here.
            if not (propData.startswith("[") and propData.endswith("]")):
                self.warn("Invalid data:", propData)

            propData = propData[1:-1].split(";")
            propData = [prop.propType.convertString(i) for i in propData]

            if len(propData) != 8:
                self.warn("Data count does not match for", propId)
                continue

            prop.values = propData
            prop.recompute()
    def build(cls, texture, view_width, view_height):
        """ Builds a shader to display <texture> in a view port with the size
        <view_width> * <view_height> """
        view_width, view_height = int(view_width), int(view_height)

        # The cache key encodes the texture layout and viewport size, so each
        # distinct configuration gets its own generated fragment shader file
        cache_key = "/$$rptemp/$$TEXDISPLAY-X{}-Y{}-Z{}-TT{}-CT{}-VW{}-VH{}.frag.glsl".format(
            texture.get_x_size(), texture.get_y_size(), texture.get_z_size(),
            texture.get_texture_type(), texture.get_component_type(),
            view_width, view_height)

        # Only regenerate the file when there is no cache entry for it
        # NOTE(review): the "or True" forces regeneration on every call and
        # defeats the cache entirely - looks like a debugging leftover;
        # confirm before removing it, since removing it would start serving
        # previously cached shader files.
        if not isfile(cache_key) or True:
            fragment_shader = cls._build_fragment_shader(
                texture, view_width, view_height)

            with open(cache_key, "w") as handle:
                handle.write(fragment_shader)

        return RPLoader.load_shader(
            "/$$rp/shader/default_gui_shader.vert.glsl", cache_key)
    def load(self, filename):
        """ Loads a profile from a given filename and returns the internal
        used index which can be assigned to a light.

        Returns -1 when the file cannot be resolved, parsed, or when the
        profile storage is already full. """

        # Make sure the user can load profiles directly from the ies profile folder
        data_path = join("/$$rp/data/ies_profiles/", filename)
        if isfile(data_path):
            filename = data_path

        # Make filename unique
        fname = Filename.from_os_specific(filename)
        if not VirtualFileSystem.get_global_ptr().resolve_filename(
                fname, get_model_path().get_value(), "ies"):
            self.error("Could not resolve", filename)
            return -1
        fname = fname.get_fullpath()

        # Check for cache entries
        if fname in self._entries:
            return self._entries.index(fname)

        # Check for out of bounds. Previously this only warned but continued,
        # which wrote the dataset past the end of the storage texture - abort
        # instead.
        if len(self._entries) >= self._max_entries:
            # TODO: Could remove unused profiles here or regenerate texture
            self.warn("Cannot load IES Profile, too many loaded! (Maximum: " +
                      str(self._max_entries) + ")")
            return -1

        # Try loading the dataset, and see what happens
        try:
            dataset = self._load_and_parse_file(fname)
        except InvalidIESProfileException as msg:
            self.warn("Failed to load profile from", filename, ":", msg)
            return -1

        if not dataset:
            return -1

        # Dataset was loaded successfully, now copy it into the next free slot
        dataset.generate_dataset_texture_into(self._storage_tex, len(self._entries))
        self._entries.append(fname)

        # Index of the newly registered profile
        return len(self._entries) - 1
    def load(self):
        """ Loads the scattering method """
        lut_path = self.handle.get_resource(
            "hosek_wilkie_scattering/scattering_lut.txo")

        if not isfile(lut_path):
            self.error("Could not find precompiled LUT for the Hosek Wilkie "
                       "Scattering! Make sure you compiled the algorithm code!")
            return

        lut = RPLoader.load_sliced_3d_texture(lut_path, 512, 128, 100)

        # Repeat along u, clamp along v and w
        lut.set_wrap_u(SamplerState.WM_repeat)
        lut.set_wrap_v(SamplerState.WM_clamp)
        lut.set_wrap_w(SamplerState.WM_clamp)

        # Bilinear filtering for both minification and magnification
        for apply_filter in (lut.set_minfilter, lut.set_magfilter):
            apply_filter(SamplerState.FT_linear)

        # Setting the format explicitely shouldn't be necessary
        # lut_tex.set_format(Image.F_rgb16)

        # Bind the LUT to both stages which need it
        for stage in (self.handle.display_stage, self.handle.envmap_stage):
            stage.set_shader_input("ScatteringLUT", lut)
    def _try_load_plugin(self, plugin_id):
        """ Attempts to load a plugin with a given name """
        plugin_path = join(self._base_dir, "Plugins", plugin_id)
        plugin_main = join(plugin_path, "__init__.py")
        if not isfile(plugin_main):
            self.warn("Cannot load", plugin_id, "because __init__.py was not found")
            return None

        module_path = "Plugins." + plugin_id + ".Plugin"

        try:
            module = importlib.import_module(module_path)
        except Exception as msg:
            self.warn("Could not import", plugin_id, "because of an import error:")
            self.warn(msg)
            return None

        if not hasattr(module, "Plugin"):
            self.warn("Plugin", plugin_id, "has no main Plugin class defined!")
            return None

        return module.Plugin
    def build(cls, texture, view_width, view_height):
        """ Builds a shader to display <texture> in a view port with the size
        <view_width> * <view_height>.

        The generated fragment shader is cached on disk, keyed by the
        texture's dimensions, type and the viewport size. """
        view_width, view_height = int(view_width), int(view_height)

        cache_key = "$$PipelineTemp/$$TEXDISPLAY-X{}-Y{}-Z{}-TT{}-CT{}-VW{}-VH{}.frag.glsl".format(
            texture.get_x_size(),
            texture.get_y_size(),
            texture.get_z_size(),
            texture.get_texture_type(),
            texture.get_component_type(),
            view_width,
            view_height)

        # Only regenerate the file when there is no cache entry for it.
        # (A leftover "or True" previously disabled the cache entirely,
        # contradicting this comment.)
        if not isfile(cache_key):
            fragment_shader = cls._build_fragment_shader(texture, view_width, view_height)

            with open(cache_key, "w") as handle:
                handle.write(fragment_shader)

        return Shader.load(Shader.SL_GLSL, "Shader/GUI/DefaultGUIShader.vert.glsl", cache_key)
    def load(self):
        """ Loads the scattering method """
        lut_path = self.handle.get_resource(
            "HosekWilkieScattering/scattering_luit.png")

        if not isfile(lut_path):
            self.error("Could not find precompiled LUT for the Hosek Wilkie "
                       "Scattering! Make sure you compiled the algorithm code!")
            return

        lut = RPLoader.load_sliced_3d_texture(lut_path, 512, 128, 100)

        # Repeat along u, clamp along v and w
        lut.set_wrap_u(SamplerState.WM_repeat)
        lut.set_wrap_v(SamplerState.WM_clamp)
        lut.set_wrap_w(SamplerState.WM_clamp)

        # Bilinear filtering for both minification and magnification
        for apply_filter in (lut.set_minfilter, lut.set_magfilter):
            apply_filter(SamplerState.FT_linear)

        # Setting the format explicitely shouldn't be necessary
        # lut_tex.set_format(Image.F_rgb16)

        # Bind the LUT to both stages which need it
        for stage in (self.handle.display_stage, self.handle.envmap_stage):
            stage.set_shader_input("ScatteringLUT", lut)
Exemple #31
0
    def __init__(self):
        """ Offscreen tool which pre-filters a cubemap into mip levels.

        Reads the six source faces from ``source/#.jpg`` (or ``.png``),
        runs a compute shader once per face and mip level, dumps each
        filtered level as ``tmp/<mip>-#.png`` and finally bakes everything
        into ``cubemap.txo.pz``.
        """
        # Headless Panda3D setup: offscreen window, no power-of-two padding
        load_prc_file_data("", """
            textures-power-2 none
            window-type offscreen
            win-size 100 100
            gl-coordinate-system default
            notify-level-display error
            print-pipe-types #f
        """)

        ShowBase.__init__(self)

        # Work relative to this script's directory; recreate a clean tmp/
        base_path = realpath(dirname(__file__))
        os.chdir(base_path)
        filter_dir = join(base_path, "tmp/")
        if isdir(filter_dir):
            shutil.rmtree(filter_dir)
        os.makedirs(filter_dir)

        # Prefer .png source faces when present, otherwise fall back to .jpg
        source_path = join(base_path, "source")
        extension = ".jpg"
        if isfile(join(source_path, "1.png")):
            extension = ".png"

        # '#' in the filename is expanded by Panda3D to the six face indices
        cubemap = self.loader.loadCubeMap(
            Filename.from_os_specific(join(source_path, "#" + extension)))
        mipmap, size = -1, 1024

        cshader = Shader.load_compute(Shader.SL_GLSL, "filter.compute.glsl")

        # Filter each mip level, halving the size each iteration (512 .. 1)
        while size > 1:
            size = size // 2
            mipmap += 1
            print("Filtering mipmap", mipmap)

            dest_cubemap = Texture("Dest")
            dest_cubemap.setup_cube_map(size, Texture.T_float, Texture.F_rgba16)
            node = NodePath("")

            # One compute dispatch per cube face, 16x16 threads per group
            for i in range(6):
                node.set_shader(cshader)
                node.set_shader_input("SourceTex", cubemap)
                node.set_shader_input("DestTex", dest_cubemap)
                node.set_shader_input("currentSize", size)
                node.set_shader_input("currentMip", mipmap)
                node.set_shader_input("currentFace", i)
                attr = node.get_attrib(ShaderAttrib)
                self.graphicsEngine.dispatch_compute(
                    ( (size + 15) // 16, (size+15) // 16, 1), attr, self.win.gsg)

            print(" Extracting data ..")

            # Copy the GPU result back to RAM so it can be written to disk
            self.graphicsEngine.extract_texture_data(dest_cubemap, self.win.gsg)

            print(" Writing data ..")
            dest_cubemap.write(join(filter_dir, "{}-#.png".format(mipmap)), 0, 0, True, False)


        print("Reading in data back in ..")
        # NOTE(review): readMipmaps is passed as the string "True" (truthy) -
        # presumably a bool was intended; verify against the loader API.
        tex = self.loader.loadCubeMap(Filename.from_os_specific(join(base_path, "tmp/#-#.png")), readMipmaps="True")

        print("Writing txo ..")
        tex.write("cubemap.txo.pz")

        shutil.rmtree(join(base_path, "tmp"))
Exemple #32
0
from __future__ import print_function
import sys
import importlib

from direct.stdpy.file import join, isfile
from os.path import dirname, realpath
from ..Util.DebugObject import DebugObject

# Store a global flag, indicating whether the C++ modules were loaded or the
# python implementation of them
NATIVE_CXX_LOADED = False

# Read the configuration from the flag-file written by setup.py
curr_path = dirname(realpath(__file__))
flag_path = join(curr_path, "use_cxx.flag")
if not isfile(flag_path):
    DebugObject.global_error("CORE", "Could not find cxx flag, please run the setup.py!")
    sys.exit(1)
else:
    # Reuse flag_path instead of re-joining the same path a second time
    with open(flag_path, "r") as handle:
        NATIVE_CXX_LOADED = handle.read().strip() == "1"

# The native module should only be imported once, and that by the internal pipeline code
# assert __package__ == "Code.Native", "You have included the pipeline in the wrong way!"

# Classes which should get imported
classes_to_import = [
    "GPUCommand",
    "GPUCommandList",
    "ShadowManager",
    "InternalLightManager",
    def _handleIncludes(self, source):
        """ Internal (recursive) method to parse #include's """

        with open(source, "r") as handle:
            content = handle.readlines()

        newContent = ""
        includeIdentifier = "#include "

        ID = self._ShaderIDs.get(source, None)
        if ID is None:
            ID = self._NextID
            self._ShaderIDs[source] = ID
            # print ID, source
            self._NextID += 1

        newContent += "#line 1 %d\n" % (ID)

        # Iterate through lines
        for line_idx, line in enumerate(content):
            lineStrip = line.strip()
            if lineStrip.startswith(includeIdentifier):
                includePart = lineStrip[len(includeIdentifier):].strip()

                # Filename is surrounded by braces
                if includePart.startswith('"') and includePart.endswith('"'):

                    # Special case
                    if includePart == '"%ShaderAutoConfig%"':
                        properIncludePart = "PipelineTemp/ShaderAutoConfig.include"
                    else:
                        # Extract include part
                        properIncludePart = Filename.fromOsSpecific(join(
                            self._GlobalShaderPath, includePart[1:-1])).toOsGeneric()

                    # And check if file exists
                    if isfile(properIncludePart):

                        # Check for recursive includes
                        if properIncludePart in self._GlobalIncludeStack:
                            # print "BetterShader: Ignoring recursive
                            # include:",properIncludePart
                            pass

                        else:
                            self._GlobalIncludeStack.append(properIncludePart)
                            newContent += "\n// FILE: '" + \
                                str(properIncludePart) + "' \n"

                            newContent += self._handleIncludes(
                                properIncludePart).strip() + "\n"

                            newContent += "#line %d %d\n" % (line_idx + 3, ID)

                    else:
                        print "BetterShader: Failed to load '" + str(properIncludePart) + "'!"
                else:
                    print "BetterShader: Invalid include:", includePart

                continue

            newContent += line.rstrip() + "\n"

        return newContent
from __future__ import print_function
import sys
from os.path import dirname, realpath

from direct.stdpy.file import join, isfile
from rpcore.rpobject import RPObject

# Store a global flag, indicating whether the C++ modules were loaded or the
# python implementation of them
NATIVE_CXX_LOADED = False

# Read the configuration from the flag-file written by setup.py
current_path = dirname(realpath(__file__))
cxx_flag_path = join(current_path, "use_cxx.flag")
if not isfile(cxx_flag_path):
    RPObject.global_error("CORE", "Could not find cxx flag, please run the setup.py!")
    sys.exit(1)
else:
    # Reuse cxx_flag_path instead of re-joining the same path a second time
    with open(cxx_flag_path, "r") as handle:
        NATIVE_CXX_LOADED = handle.read().strip() == "1"

# The native module should only be imported once, and that by the internal pipeline code
assert __package__ == "rpcore.native", "You have included the pipeline in the wrong way!"

# Classes which should get imported
classes_to_import = [
    "GPUCommand",
    "GPUCommandList",
    "ShadowManager",
    "InternalLightManager",
Exemple #35
0
    def __init__(self):
        """ Offscreen tool which pre-filters a cubemap into mip levels.

        Reads the six source faces from ``source/#.jpg`` (or ``.png``),
        runs a compute shader once per face and mip level, dumps each
        filtered level as ``tmp/<mip>-#.png`` and finally bakes everything
        into ``cubemap.txo.pz``.
        """
        # Headless Panda3D setup: offscreen window, no power-of-two padding
        load_prc_file_data(
            "", """
            textures-power-2 none
            window-type offscreen
            win-size 100 100
            gl-coordinate-system default
            notify-level-display error
            print-pipe-types #f
        """)

        ShowBase.__init__(self)

        # Work relative to this script's directory; recreate a clean tmp/
        base_path = realpath(dirname(__file__))
        os.chdir(base_path)
        filter_dir = join(base_path, "tmp/")
        if isdir(filter_dir):
            shutil.rmtree(filter_dir)
        os.makedirs(filter_dir)

        # Prefer .png source faces when present, otherwise fall back to .jpg
        source_path = join(base_path, "source")
        extension = ".jpg"
        if isfile(join(source_path, "1.png")):
            extension = ".png"

        # '#' in the filename is expanded by Panda3D to the six face indices
        cubemap = self.loader.loadCubeMap(
            Filename.from_os_specific(join(source_path, "#" + extension)))
        mipmap, size = -1, 1024

        cshader = Shader.load_compute(Shader.SL_GLSL, "filter.compute.glsl")

        # Filter each mip level, halving the size each iteration (512 .. 1)
        while size > 1:
            size = size // 2
            mipmap += 1
            print("Filtering mipmap", mipmap)

            dest_cubemap = Texture("Dest")
            dest_cubemap.setup_cube_map(size, Texture.T_float,
                                        Texture.F_rgba16)
            node = NodePath("")

            # One compute dispatch per cube face, 16x16 threads per group
            for i in range(6):
                node.set_shader(cshader)
                node.set_shader_input("SourceTex", cubemap)
                node.set_shader_input("DestTex", dest_cubemap)
                node.set_shader_input("currentSize", size)
                node.set_shader_input("currentMip", mipmap)
                node.set_shader_input("currentFace", i)
                attr = node.get_attrib(ShaderAttrib)
                self.graphicsEngine.dispatch_compute(
                    ((size + 15) // 16, (size + 15) // 16, 1), attr,
                    self.win.gsg)

            print(" Extracting data ..")

            # Copy the GPU result back to RAM so it can be written to disk
            self.graphicsEngine.extract_texture_data(dest_cubemap,
                                                     self.win.gsg)

            print(" Writing data ..")
            dest_cubemap.write(join(filter_dir, "{}-#.png".format(mipmap)), 0,
                               0, True, False)

        print("Reading in data back in ..")
        # NOTE(review): readMipmaps is passed as the string "True" (truthy) -
        # presumably a bool was intended; verify against the loader API.
        tex = self.loader.loadCubeMap(Filename.from_os_specific(
            join(base_path, "tmp/#-#.png")),
                                      readMipmaps="True")

        print("Writing txo ..")
        tex.write("cubemap.txo.pz")

        shutil.rmtree(join(base_path, "tmp"))
Exemple #36
0
# Working directory, normalized to forward slashes without a trailing slash
cwd = getcwd().replace("\\", "/").rstrip("/")

# Substrings of filenames which should be excluded from processing
ignoreFiles = []


def checkIgnore(source):
    """ Returns False when <source> contains any entry of ignoreFiles
    (case-insensitive), True otherwise """
    lowered = source.lower()
    return not any(entry.lower() in lowered for entry in ignoreFiles)


# Collect all non-ignored header files from Source/
allSources = []
for entry in listdir("Source"):
    if isfile(join("Source", entry)) and checkIgnore(entry) and entry.endswith(".h"):
        allSources.append(entry)

# Quote every filename for the command line
allSourcesStr = ' '.join('"' + name + '"' for name in allSources)

print("\nRunning interrogate ..")

# Assemble the interrogate invocation piece by piece
_command_parts = [
    PANDA_BIN + "/interrogate -D__inline -DCPPPARSER -DP3_INTERROGATE=1 -D__cplusplus ",
    "-fnames -string -refcount -assert ",
    "-Dvolatile= ",
    "-DWIN32= ",
    "-DWIN32_VC= ",
    "-D_WINDOWS= ",
    "-S" + PANDA_INCLUDE + "/parser-inc ",
    "-S" + PANDA_INCLUDE + "/ ",
]
command = "".join(_command_parts)
# Third command line argument: path to the panda include directory
PANDA_INCLUDE = sys.argv[3]


# Working directory, normalized to forward slashes without a trailing slash
cwd = getcwd().replace("\\", "/").rstrip("/")

# Substrings of filenames which should be excluded from processing
ignoreFiles = []


def checkIgnore(source):
    """ Returns False when <source> contains any entry of ignoreFiles
    (case-insensitive), True otherwise """
    lowered = source.lower()
    return not any(entry.lower() in lowered for entry in ignoreFiles)


# Collect all non-ignored header files from Source/
allSources = [i for i in listdir("Source") if isfile(join("Source", i)) and checkIgnore(i) and i.endswith(".h") ]


# Quote every filename for the command line
allSourcesStr = ' '.join(['"' + i + '"' for i in allSources])

# print() form works on both Python 2 and 3 with identical output
# (was a py2 print statement; the sibling script already uses print())
print("\nRunning interrogate ..")

# Assemble the interrogate invocation piece by piece
command = PANDA_BIN + \
    "/interrogate -D__inline -DCPPPARSER -DP3_INTERROGATE=1 -D__cplusplus "
command += "-fnames -string -refcount -assert "
command += "-Dvolatile= "
command += "-S" + PANDA_INCLUDE + "/parser-inc "
command += "-S" + PANDA_INCLUDE + "/ "

# command += "-I" + PANDA_BUILT + "/bin/include/ "
command += "-oc Source/InterrogateModule.cxx "