Example #1
    def mount(self):
        """ Inits the VFS Mounts """

        self.debug("Setting up virtual filesystem.")
        vfs = VirtualFileSystem.getGlobalPtr()

        # Mount shaders
        vfs.mountLoop(
            join(self.basePath, 'Shader'), 'Shader', 0)

        # Mount data and models
        vfs.mountLoop(join(self.basePath, 'Data'), 'Data', 0)
        vfs.mountLoop(join(self.basePath, 'Models'), 'Models', 0)
        vfs.mountLoop(join(self.basePath, 'Config'), 'Config', 0)
        vfs.mountLoop(join(self.basePath, 'Demoscene.ignore'), 'Demoscene.ignore', 0)

        # Just mount everything
        # vfs.mountLoop(self.basePath, '.', 0)

        if not isdir(self.writePath):
            self.debug("Creating temp path, as it does not exist yet")
            try:
                makedirs(self.writePath)
            except Exception as msg:
                self.error("Failed to create temp path:", msg)
                import sys
                sys.exit(0)
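For orientation, here is a minimal, self-contained sketch of the same mount-then-access pattern (the directory and file names are illustrative, not taken from the pipeline):

# Hedged sketch: mount a real directory into Panda3D's virtual filesystem and
# access it through the VFS-aware helpers from direct.stdpy.file.
from panda3d.core import VirtualFileSystem, Filename
from direct.stdpy.file import join, isfile, listdir

vfs = VirtualFileSystem.get_global_ptr()
# Mount the on-disk ./Data directory under the virtual mount point "Data"
# (the trailing 0 is the mount flags argument, as in the examples above).
vfs.mount(Filename.from_os_specific("./Data"), "Data", 0)

# The mounted content is now visible to the VFS-aware path helpers
print(listdir("Data"))
print(isfile(join("Data", "settings.ini")))  # hypothetical file name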
Example #2
    def __init__(self):
        load_prc_file_data("", """
            textures-power-2 none
            window-type offscreen
            win-size 100 100
            gl-coordinate-system default
            notify-level-display error
            print-pipe-types #f
        """)

        ShowBase.__init__(self)

        base_path = realpath(dirname(__file__))
        os.chdir(base_path)
        filter_dir = join(base_path, "tmp/")
        if isdir(filter_dir):
            shutil.rmtree(filter_dir)
        os.makedirs(filter_dir)

        cubemap = self.loader.loadCubeMap(Filename.from_os_specific(join(base_path, "source/#.jpg")))
        mipmap, size = -1, 512

        cshader = Shader.load_compute(Shader.SL_GLSL, "filter.compute.glsl")

        while size > 1:
            size = size // 2
            mipmap += 1
            print("Filtering mipmap", mipmap)

            dest_cubemap = Texture("Dest")
            dest_cubemap.setup_cube_map(size, Texture.T_float, Texture.F_rgba16)
            node = NodePath("")

            for i in range(6):
                node.set_shader(cshader)
                node.set_shader_input("SourceTex", cubemap)
                node.set_shader_input("DestTex", dest_cubemap)
                node.set_shader_input("currentSize", size)
                node.set_shader_input("currentMip", mipmap)
                node.set_shader_input("currentFace", i)
                attr = node.get_attrib(ShaderAttrib)
                self.graphicsEngine.dispatch_compute(
                    ( (size + 15) // 16, (size+15) // 16, 1), attr, self.win.get_gsg())

            print(" Extracting data ..")

            self.graphicsEngine.extract_texture_data(dest_cubemap, self.win.get_gsg())

            print(" Writing data ..")
            dest_cubemap.write(join(filter_dir, "{}-#.png".format(mipmap)), 0, 0, True, False)


        print("Reading data back in ..")
        tex = self.loader.loadCubeMap(Filename.from_os_specific(join(base_path, "tmp/#-#.png")), readMipmaps="True")

        print("Writing txo ..")
        tex.write("cubemap.txo.pz")

        shutil.rmtree(join(base_path, "tmp"))
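A short aside on the dispatch size used above: (size + 15) // 16 is an integer ceiling division, so the number of 16x16 work groups always covers the whole cubemap face even when size is not a multiple of 16. A minimal sketch of that arithmetic:

def workgroups(size, group_size=16):
    # Ceiling division: how many groups of group_size threads cover size texels
    return (size + group_size - 1) // group_size

print(workgroups(512))  # 32
print(workgroups(100))  # 7 -- six full groups of 16 would only cover 96 texels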
Example #3
 def get_available_plugins(self):
     """ Returns a list of all installed plugins, no matter if they are
     enabled or not. This also does no check if the plugin names are valid. """
     plugins = []
     files = listdir(join(self._base_dir, "Plugins"))
     for fname in files:
         abspath = join(self._base_dir, "Plugins", fname)
         if isdir(abspath) and fname not in ["PluginPrefab"]:
             plugins.append(fname)
     return plugins
Example #4
    def load_plugin_settings(self, plugin_id, plugin_pth):
        """ Internal method to load all settings of a plugin, given its plugin
        id and path to the plugin base directory """
        config_file = join(plugin_pth, "config.yaml")
        config = load_yaml_file(config_file)
        # When you don't specify anything in the settings, instead of
        # returning an empty dictionary, pyyaml returns None
        config["settings"] = config["settings"] or []
        config["daytime_settings"] = config["daytime_settings"] or []

        if isinstance(config["settings"], dict) or isinstance(config["daytime_settings"], dict) or \
            (config["settings"] and len(config["settings"][0]) != 2) or \
            (config["daytime_settings"] and len(config["daytime_settings"][0]) != 2):
            self.error("Malformed config for plugin", plugin_id,
                       "- did you miss '!!omap' ?")

        settings = collections.OrderedDict([(k, make_setting_from_data(v))
                                            for k, v in config["settings"]])
        self.settings[plugin_id] = settings

        if self.requires_daytime_settings:
            daysettings = collections.OrderedDict([
                (k, make_daysetting_from_data(v))
                for k, v in config["daytime_settings"]
            ])
            self.day_settings[plugin_id] = daysettings
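The '!!omap' hint in the error message above refers to YAML's ordered-mapping tag: it makes the parser return an ordered list of (key, value) pairs instead of a plain dict, which is exactly what the OrderedDict comprehensions expect. A standalone sketch with plain PyYAML instead of the pipeline's load_yaml_file helper (the setting names are made up):

import collections
import yaml

sample = """
settings: !!omap
- resolution: {type: int, default: 512}
- quality: {type: enum, default: high}
"""
config = yaml.safe_load(sample)
print(config["settings"])   # [('resolution', {...}), ('quality', {...})]
settings = collections.OrderedDict(config["settings"])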
Example #5
    def _on_exit_cleanup(self):
        """ Gets called when the application exits """

        if self._do_cleanup:
            self.debug("Cleaning up ..")

            if self._write_path is not None:

                # Try removing the lockfile
                self._try_remove(self._lock_file)

                # Check for further tempfiles in the write path
                # We explicitly use os.listdir here instead of Panda3D's listdir,
                # to work with actual paths
                for fname in os.listdir(self._write_path):
                    pth = join(self._write_path, fname)

                    # Tempfiles from the pipeline start with "$$" to distinguish
                    # them from user created files
                    if isfile(pth) and fname.startswith("$$"):
                        self._try_remove(pth)

                # Delete the write path if no files are left
                if len(os.listdir(self._write_path)) < 1:
                    try:
                        os.removedirs(self._write_path)
                    except IOError:
                        pass
Example #6
    def _on_exit_cleanup(self):
        """ Gets called when the application exits """

        if self._do_cleanup:
            self.debug("Cleaning up ..")

            if self._write_path is not None:

                # Try removing the lockfile
                self._try_remove(self._lock_file)

                # Check for further tempfiles in the write path
                # We explicitly use os.listdir here instead of Panda3D's listdir,
                # to work with actual paths
                for fname in os.listdir(self._write_path):
                    pth = join(self._write_path, fname)

                    # Tempfiles from the pipeline start with "$$" to distinguish
                    # them from user created files
                    if isfile(pth) and fname.startswith("$$"):
                        self._try_remove(pth)

                # Delete the write path if no files are left
                if len(os.listdir(self._write_path)) < 1:
                    try:
                        os.removedirs(self._write_path)
                    except IOError:
                        pass
Example #7
 def load_base_settings(self, plugin_dir):
     """ Loads the base settings of all plugins, even of disabled plugins.
     This is required to verify all overrides. """
     for entry in listdir(plugin_dir):
         abspath = join(plugin_dir, entry)
         if isdir(abspath) and entry not in ("__pycache__", "plugin_prefab"):
             self.load_plugin_settings(entry, abspath)
Example #8
 def load_base_settings(self, plugin_dir):
     """ Loads the base settings of all plugins, even of disabled plugins.
     This is required to verify all overrides. """
     for entry in listdir(plugin_dir):
         abspath = join(plugin_dir, entry)
         if isdir(abspath) and entry not in ("__pycache__",
                                             "plugin_prefab"):
             self.load_plugin_settings(entry, abspath)
Example #9
 def finish(self):
     f=open(join(self.dstDir,dataFile),"w")
     d={"default":self.default,
         "extraInfo":self.extraInfo,
         "tiles":list(self.processed)
     }
     json.dump(d,f)
     f.close()
Example #10
 def wrapper(srcDir,name,callback=None):
     def done(model):
         callback(call(RenderTile(model)))
     path=join(srcDir,name+".bam")
     if callback:
         loader.loadModel(path,callback=done)
     else:
         return call(RenderTile(loader.loadModel(path)))
Example #11
def importTile(srcDir,name,callback=None):
    def done(model):
        callback(RenderTile(model))
    path=join(srcDir,name+".bam")
    if callback:
        loader.loadModel(path,callback=done)
    else:
        return RenderTile(loader.loadModel(path))
Example #12
def importTile(srcDir,name,callback=None):
    def done(model):
        callback(RenderTile(model))
    path=join(srcDir,name+".bam")
    if callback:
        loader.loadModel(path,callback=done)
    else:
        return RenderTile(loader.loadModel(path))
Example #13
 def finish(self):
     f=open(join(self.dstDir,dataFile),"w")
     d={"default":self.default,
         "extraInfo":self.extraInfo,
         "tiles":list(self.processed)
     }
     json.dump(d,f)
     f.close()
Example #14
    def startConvert(self):

        filename = str(self.ipt_source.text())
        self.btn_showResult.setEnabled(False)

        if len(filename) < 1 or not isfile(filename):
            QtGui.QMessageBox.warning(
                self, "Voxelizer",
                "You have to select a valid source file first!")
            return

        parentDir = "/".join(filename.split("/")[:-1])
        destination = join(parentDir, "voxelized")
        print("ParentDir:", parentDir)

        voxelGridSize = 32
        if self.chb_gridSize16.isChecked():
            voxelGridSize = 16
        elif self.chb_gridSize32.isChecked():
            voxelGridSize = 32
        elif self.chb_gridSize64.isChecked():
            voxelGridSize = 64
        elif self.chb_gridSize128.isChecked():
            voxelGridSize = 128
        elif self.chb_gridSize256.isChecked():
            voxelGridSize = 256
        elif self.chb_gridSize512.isChecked():
            voxelGridSize = 512

        borderSize = float(self.box_borderSize.value())
        self.clearLog()
        self.addLog("Starting to convert ..")
        self.processStatus.setValue(0)

        result = False

        try:
            result = self.showbase.voxelize(
                filename,
                parentDir,
                destination, {
                    "gridResolution": voxelGridSize,
                    "border": borderSize,
                },
                logCallback=self._progressCallback)
        except Exception as msg:
            self.addLog("Fatal error during conversion process!")
            self.addLog("Message: " + str(msg))

        self.processStatus.setValue(0)

        if not result:
            self.addLog("Error: Voxelizer returned non-success statuscode!")
        else:
            self.btn_showResult.setEnabled(True)
            self.lastResultData = (filename, destination)
Example #15
 def _find_basepath(self):
     """ Attempts to find the pipeline base path by looking at the location
     of this file """
     pth = os.path.abspath(
         join(os.path.dirname(os.path.realpath(__file__)), ".."))
     filename = Filename.from_os_specific(pth)
     # convert lib/site-packages to Lib/site-packages on windows
     filename.make_true_case()
     return filename.get_fullpath()
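A small sketch of the path conversion used above (the paths themselves are illustrative): Panda3D's Filename stores paths in a forward-slash, OS-independent form, and from_os_specific / to_os_specific convert between that form and the native spelling.

import os
from panda3d.core import Filename

os_path = r"C:\pipeline\rpcore" if os.name == "nt" else "/opt/pipeline/rpcore"
panda_path = Filename.from_os_specific(os_path).get_fullpath()
print(panda_path)                              # e.g. "/c/pipeline/rpcore" on Windows
print(Filename(panda_path).to_os_specific())   # back to the native spelling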
Example #16
    def cleanup(self):
        """ Gets called when the application exits """

        self.debug("Cleaning up ..")

        # Try removing the lockfile
        self._tryRemove(self.lockFile)

        # Try removing the shader auto config
        self._tryRemove(join(self.writePath, "ShaderAutoConfig.include"))

        # Check for further tempfiles in the write path
        for f in os.listdir(self.writePath):
            pth = join(self.writePath, f)

            # Tempfiles from the pipeline start with "$$" to avoid removing user-
            # created files.
            if isfile(pth) and f.startswith("$$"):
                self._tryRemove(pth)
Example #17
 def create_shaders(self):
     """ Creates all the shaders used for precomputing """
     self.shaders = {}
     resource_path = self.handle.get_shader_resource("eric_bruneton")
     for fname in listdir(resource_path):
         fpath = join(resource_path, fname)
         if isfile(fpath) and fname.endswith(".compute.glsl"):
             shader_name = fname.split(".")[0]
             shader_obj = RPLoader.load_shader(fpath)
             self.shaders[shader_name] = shader_obj
Example #18
 def _create_shaders(self):
     """ Creates all the shaders used for precomputing """
     self._shaders = {}
     resource_path = self._handle.get_shader_resource("eric_bruneton")
     for fname in listdir(resource_path):
         fpath = join(resource_path, fname)
         if isfile(fpath) and fname.endswith(".compute.glsl"):
             shader_name = fname.split(".")[0]
             shader_obj = Shader.load_compute(Shader.SL_GLSL, fpath)
             self._shaders[shader_name] = shader_obj
Example #19
    def setWritePath(self, pth):
        """ Set a writable directory for generated files. This can be a string
        path name or a multifile with openReadWrite(). If no pathname is set
        then the root directory is used.

        Applications are usually installed system-wide and won't have write
        access to the basePath. It is wise to at least use a temp directory
        like tempfile.mkdtemp(prefix='Shader-tmp'), or an application directory
        in the user's home/app dir."""
        self.writePath = Filename.fromOsSpecific(pth).getFullpath()
        self.lockFile = join(self.writePath, "instance.pid")
Example #20
 def __init__(self,dir,importer):
     path=join(dir,dataFile)
     d=json.load(open(path,'r'))
     self.default=d['default']
     extraInfo=d['extraInfo']
     self.tileSize=extraInfo['size']
     self.originX=extraInfo['originX']
     self.originY=extraInfo['originY']
     self.tiles=set(tuple(t) for t in d['tiles'])
     self.dir=dir
     self.importer=importer
Example #21
 def __init__(self,dir,importer):
     path=join(dir,dataFile)
     d=json.load(open(path,'r'))
     self.default=d['default']
     extraInfo=d['extraInfo']
     self.tileSize=extraInfo['size']
     self.originX=extraInfo['originX']
     self.originY=extraInfo['originY']
     self.tiles=set(tuple(t) for t in d['tiles'])
     self.dir=dir
     self.importer=importer
Example #22
    def mount(self):
        """ Inits the VFS Mounts """

        self.debug("Setting up virtual filesystem.")
        vfs = VirtualFileSystem.getGlobalPtr()

        # Mount data and models
        vfs.mountLoop(join(self.basePath, 'Data'), 'Data', 0)
        vfs.mountLoop(join(self.basePath, 'Models'), 'Models', 0)
        vfs.mountLoop(join(self.basePath, 'Config'), 'Config', 0)

        # Ensure the pipeline write path exists, and if not, create it
        if not isdir(self.writePath):
            self.debug("Creating temp path, as it does not exist yet")
            try:
                os.makedirs(self.writePath, 0o777)
            except Exception as msg:
                self.error("Failed to create temp path:", msg)
                import sys
                sys.exit(0)
Example #23
    def startConvert(self):

        filename = str(self.ipt_source.text())
        self.btn_showResult.setEnabled(False)

        if len(filename) < 1 or not isfile(filename):
            QtGui.QMessageBox.warning(
                self, "Voxelizer", "You have to select a valid source file first!")
            return

        parentDir = "/".join(filename.split("/")[:-1])
        destination = join(parentDir, "voxelized")
        print("ParentDir:", parentDir)

        voxelGridSize = 32
        if self.chb_gridSize16.isChecked():
            voxelGridSize = 16
        elif self.chb_gridSize32.isChecked():
            voxelGridSize = 32
        elif self.chb_gridSize64.isChecked():
            voxelGridSize = 64
        elif self.chb_gridSize128.isChecked():
            voxelGridSize = 128
        elif self.chb_gridSize256.isChecked():
            voxelGridSize = 256
        elif self.chb_gridSize512.isChecked():
            voxelGridSize = 512

        borderSize = float(self.box_borderSize.value())
        self.clearLog()
        self.addLog("Starting to convert ..")
        self.processStatus.setValue(0)

        result = False

        try:
            result = self.showbase.voxelize(filename, parentDir, destination, {
                "gridResolution": voxelGridSize,
                "border": borderSize,
            }, logCallback=self._progressCallback)
        except Exception as msg:
            self.addLog("Fatal error during conversion process!")
            self.addLog("Message: " + str(msg))

        self.processStatus.setValue(0)

        if not result:
            self.addLog("Error: Voxelizer returned non-success statuscode!")
        else:
            self.btn_showResult.setEnabled(True)
            self.lastResultData = (filename, destination)
Example #24
    def __init__(self):
        load_prc_file_data("", """
            textures-power-2 none
            window-type offscreen
            win-size 100 100
            gl-coordinate-system default
            notify-level-display error
            print-pipe-types #f
        """)

        ShowBase.__init__(self)

        base_path = realpath(dirname(__file__))
        os.chdir(base_path)
        slice_dir = join(base_path, "slices/")
        if isdir(slice_dir):
            shutil.rmtree(slice_dir)
        os.makedirs(slice_dir)

        node = NodePath("")

        w, h, d = 512, 512, 64

        self.voxel_grid = Texture("voxels")
        self.voxel_grid.setup_3d_texture(w, h, d, Texture.T_unsigned_byte, Texture.F_rgba8)


        # Generate grid
        cshader = Shader.load_compute(Shader.SL_GLSL, "generate_grid.compute.glsl")
        node.set_shader(cshader)
        node.set_shader_input("DestTex", self.voxel_grid)
        attr = node.get_attrib(ShaderAttrib)

        self.graphicsEngine.dispatch_compute(
            ((w + 7) // 8, (h + 7) // 8, (d + 3) // 4), attr, self.win.get_gsg())

        self.graphicsEngine.extract_texture_data(self.voxel_grid, self.win.get_gsg())

        print("Writing data ..")
        self.voxel_grid.write(Filename.from_os_specific(join(slice_dir, "#.png")), 0, 0, True, False)
Example #25
    def _handleIncludes(self, source):
        """ Internal (recursive) method to parse #include's """

        with open(source, "r") as handle:
            content = handle.readlines()

        newContent = ""
        includeIdentifier = "#include "

        # Iterate through lines
        for line_idx, line in enumerate(content):
            lineStrip = line.strip()
            if lineStrip.startswith(includeIdentifier):
                includePart = lineStrip[len(includeIdentifier):].strip()

                # Filename is surrounded by braces
                # Todo: maybe also support ->'<- in addition to ->"<-
                if includePart.startswith('"') and includePart.endswith('"'):

                    # Special case
                    if includePart == '"%ShaderAutoConfig%"':
                        properIncludePart = "PipelineTemp/ShaderAutoConfig.include"
                    else:
                        # Extract include part
                        properIncludePart = Filename.fromOsSpecific(join(
                            self._GlobalShaderPath, includePart[1:-1])).toOsGeneric()

                    # And check if file exists
                    if isfile(properIncludePart):

                        # Check for recursive includes
                        if properIncludePart in self._GlobalIncludeStack:
                            # print "BetterShader: Ignoring recursive
                            # include:",properIncludePart
                            pass

                        else:
                            self._GlobalIncludeStack.append(properIncludePart)
                            newContent += "\n// FILE: '" + \
                                str(properIncludePart) + "' \n"
                            newContent += self._handleIncludes(
                                properIncludePart).strip() + "\n"
                    else:
                        print("BetterShader: Failed to load '" + str(properIncludePart) + "'!")
                else:
                    print("BetterShader: Invalid include:", includePart)

                continue

            newContent += line.rstrip() + "\n"

        return newContent
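The method above is essentially a recursive pre-processor for '#include "file"' lines. A simplified, standalone sketch of the same idea (not the pipeline's exact implementation; the file layout is hypothetical):

from os.path import join, isfile

def resolve_includes(path, shader_root, stack=None):
    """ Splice '#include "file"' lines recursively, skipping files that are
    already on the stack to avoid infinite recursion. """
    stack = stack if stack is not None else []
    output = []
    with open(path, "r") as handle:
        lines = handle.readlines()
    for line in lines:
        stripped = line.strip()
        if stripped.startswith('#include "') and stripped.endswith('"'):
            target = join(shader_root, stripped[len('#include "'):-1])
            if isfile(target) and target not in stack:
                stack.append(target)
                output.append("// FILE: '" + target + "'")
                output.append(resolve_includes(target, shader_root, stack))
            continue
        output.append(line.rstrip())
    return "\n".join(output)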
Example #26
    def _try_load_plugin(self, plugin_id):
        """ Attempts to load a plugin with a given name """
        plugin_path = join(self._base_dir, "Plugins", plugin_id)
        plugin_main = join(plugin_path, "__init__.py")
        if not isfile(plugin_main):
            self.warn("Cannot load", plugin_id, "because __init__.py was not found")
            return None

        module_path = "Plugins." + plugin_id + ".Plugin"

        try:
            module = importlib.import_module(module_path)
        except Exception as msg:
            self.warn("Could not import", plugin_id, "because of an import error:")
            self.warn(msg)
            return None

        if not hasattr(module, "Plugin"):
            self.warn("Plugin", plugin_id, "has no main Plugin class defined!")
            return None

        return module.Plugin
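The plugin loader above relies on importlib plus an attribute check. A minimal standalone sketch of that pattern (the module name is a stand-in, not a pipeline module):

import importlib

module = importlib.import_module("json")           # stand-in for "Plugins.<id>.Plugin"
plugin_cls = getattr(module, "JSONDecoder", None)  # stand-in for the "Plugin" class
if plugin_cls is None:
    print("Module defines no such class")
else:
    print("Loaded", plugin_cls)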
Example #27
 def wrapper(srcDir,name,callback=None):
     def process(model):
         return call(RenderTile(model))
     def done(model):
         callback(process(model))
     path=join(srcDir,name+".bam")
     if callback:
         # TODO: Update this when Panda3d bug is fixed: https://bugs.launchpad.net/panda3d/+bug/1186880
         # To work around this bug, disable async model loading
         #loader.loadModel(path,callback=done)
         done(loader.loadModel(path))
     else:
         return process(loader.loadModel(path))
Example #28
    def mount(self):
        """ Inits the VFS Mounts """

        self.debug("Setting up virtual filesystem.")
        vfs = VirtualFileSystem.getGlobalPtr()

        # Mount shaders
        vfs.mountLoop(
            join(self.basePath, 'Shader'), 'Shader', 0)

        # Mount data
        vfs.mountLoop(join(self.basePath, 'Data'), 'Data', 0)

        # TODO: Mount core

        if not isdir(self.writePath):
            self.debug("Creating temp path, as it does not exist yet")
            try:
                makedirs(self.writePath)
            except Exception as msg:
                self.error("Failed to create temp path:", msg)
                import sys
                sys.exit(0)
Example #29
    def mount(self):
        """ Inits the VFS Mounts """

        self.debug("Setting up virtual filesystem.")
        vfs = VirtualFileSystem.getGlobalPtr()

        # Mount data and models
        vfs.mountLoop(join(self.basePath, 'Data'), 'Data', 0)
        vfs.mountLoop(join(self.basePath, 'Models'), 'Models', 0)
        vfs.mountLoop(join(self.basePath, 'Config'), 'Config', 0)
        vfs.mountLoop(join(self.basePath, 'Effects'), 'Effects', 0)

        # Mount shaders under a different name to access them from the effects
        vfs.mountLoop(join(self.basePath, 'Shader'), 'ShaderMount', 0)

        # Ensure the pipeline write path exists, and if not, create it
        if not isdir(self.writePath):
            self.debug("Creating temp path, as it does not exist yet")
            try:
                os.makedirs(self.writePath, 0o777)
            except Exception as msg:
                self.error("Failed to create temp path:", msg)
                import sys
                sys.exit(0)
Example #30
    def write_path(self, pth):
        """ Set a writable directory for generated files. This can be a string
        path name or a multifile with openReadWrite(). If no pathname is set
        then the root directory is used.

        This feature is usually only used for debugging, the pipeline will dump
        all generated shaders and other temporary files to that directory.
        If you don't need this, you can use set_virtual_write_path(), which
        will create the temporary path in the VirtualFileSystem, thus not
        writing any files to disk. """
        if pth is None:
            self._write_path = None
            self._lock_file = "instance.pid"
        else:
            self._write_path = Filename.from_os_specific(pth).get_fullpath()
            self._lock_file = join(self._write_path, "instance.pid")
Example #31
    def _set_write_path(self, pth):
        """ Set a writable directory for generated files. This can be a string
        path name or a multifile with openReadWrite(). If no pathname is set
        then the root directory is used.

        This feature is usually only used for debugging, the pipeline will dump
        all generated shaders and other temporary files to that directory.
        If you don't need this, you can use set_virtual_write_path(), which
        will create the temporary path in the VirtualFileSystem, thus not
        writing any files to disk. """
        if pth is None:
            self._write_path = None
            self._lock_file = "instance.pid"
        else:
            self._write_path = Filename.from_os_specific(pth).get_fullpath()
            self._lock_file = join(self._write_path, "instance.pid")
Example #32
    def _writeDebugShader(self, name, content):
        """ Internal method to dump shader for debugging """

        if not self._DumpShaders:
            return

        cachePath = "PipelineTemp"
        if not isdir(cachePath):
            print("Cache path does not exist!:", cachePath)
            print("Disabling shader dump")
            self._DumpShaders = False
            return

        writeName = name.strip().replace("/", "-").replace(".", "_") + ".bin"

        with open(join(cachePath, writeName), "w") as handle:
            handle.write(str(content))
Example #33
    def _writeDebugShader(self, name, content):
        """ Internal method to dump shader for debugging """

        if not self._DumpShaders:
            return

        cachePath = "PipelineTemp"
        if not isdir(cachePath):
            print("Cache path does not exist!:", cachePath)
            print("Disabling shader dump")
            self._DumpShaders = False
            return

        writeName = name.strip().replace("/", "-").replace(".", "_") + ".bin"

        with open(join(cachePath, writeName), "w") as handle:
            handle.write(str(content))
Example #34
    def load_plugin_settings(self, plugin_id, plugin_pth):
        """ Internal method to load all settings of a plugin, given its plugin
        id and path to the plugin base directory """
        config_file = join(plugin_pth, "config.yaml")
        config = load_yaml_file(config_file)
        # When you don't specify anything in the settings, instead of
        # returning an empty dictionary, pyyaml returns None
        config["settings"] = config["settings"] or []
        config["daytime_settings"] = config["daytime_settings"] or []

        settings = collections.OrderedDict(
            [(k, make_setting_from_data(v)) for k, v in config["settings"]])
        self.settings[plugin_id] = settings

        if self.requires_daytime_settings:
            daysettings = collections.OrderedDict(
                [(k, make_daysetting_from_data(v)) for k, v in config["daytime_settings"]])
            self.day_settings[plugin_id] = daysettings
Example #35
    def load(self, filename):
        """ Loads a profile from a given filename and returns the internally
        used index which can be assigned to a light."""

        # Make sure the user can load profiles directly from the ies profile folder
        data_path = join("/$$rp/rpcore/data/ies_profiles/", filename)
        if isfile(data_path):
            filename = data_path

        # Make filename unique
        fname = Filename.from_os_specific(filename)
        if not VirtualFileSystem.get_global_ptr().resolve_filename(
                fname,
                get_model_path().get_value(), "ies"):
            self.error("Could not resolve", filename)
            return -1
        fname = fname.get_fullpath()

        # Check for cache entries
        if fname in self._entries:
            return self._entries.index(fname)

        # Check for out of bounds
        if len(self._entries) >= self._max_entries:
            # TODO: Could remove unused profiles here or regenerate texture
            self.warn(
                "Cannot load IES Profile, too many loaded! (Maximum: 32)")

        # Try loading the dataset, and see what happens
        try:
            dataset = self._load_and_parse_file(fname)
        except InvalidIESProfileException as msg:
            self.warn("Failed to load profile from", filename, ":", msg)
            return -1

        if not dataset:
            return -1

        # Dataset was loaded successfully, now copy it
        dataset.generate_dataset_texture_into(self._storage_tex,
                                              len(self._entries))
        self._entries.append(fname)

        return len(self._entries) - 1
Example #36
    def load_plugin_settings(self, plugin_id, plugin_pth):
        """ Internal method to load all settings of a plugin, given its plugin
        id and path to the plugin base directory """
        config_file = join(plugin_pth, "config.yaml")
        config = load_yaml_file(config_file)
        # When you don't specify anything in the settings, instead of
        # returning an empty dictionary, pyyaml returns None
        config["settings"] = config["settings"] or []
        config["daytime_settings"] = config["daytime_settings"] or []

        settings = collections.OrderedDict([(k, make_setting_from_data(v))
                                            for k, v in config["settings"]])
        self.settings[plugin_id] = settings

        if self.requires_daytime_settings:
            daysettings = collections.OrderedDict([
                (k, make_daysetting_from_data(v))
                for k, v in config["daytime_settings"]
            ])
            self.day_settings[plugin_id] = daysettings
Example #37
    def load(self, filename):
        """ Loads a profile from a given filename and returns the internally
        used index which can be assigned to a light."""

        # Make sure the user can load profiles directly from the ies profile folder
        data_path = join("/$$rp/data/ies_profiles/", filename)
        if isfile(data_path):
            filename = data_path

        # Make filename unique
        fname = Filename.from_os_specific(filename)
        if not VirtualFileSystem.get_global_ptr().resolve_filename(
                fname, get_model_path().get_value(), "ies"):
            self.error("Could not resolve", filename)
            return -1
        fname = fname.get_fullpath()

        # Check for cache entries
        if fname in self._entries:
            return self._entries.index(fname)

        # Check for out of bounds
        if len(self._entries) >= self._max_entries:
            # TODO: Could remove unused profiles here or regenerate texture
            self.warn("Cannot load IES Profile, too many loaded! (Maximum: 32)")

        # Try loading the dataset, and see what happens
        try:
            dataset = self._load_and_parse_file(fname)
        except InvalidIESProfileException as msg:
            self.warn("Failed to load profile from", filename, ":", msg)
            return -1

        if not dataset:
            return -1

        # Dataset was loaded successfully, now copy it
        dataset.generate_dataset_texture_into(self._storage_tex, len(self._entries))
        self._entries.append(fname)

        return len(self._entries) - 1
Example #38
    def load_plugin_settings(self, plugin_id, plugin_pth):
        """ Internal method to load all settings of a plugin, given its plugin
        id and path to the plugin base directory """
        config_file = join(plugin_pth, "config.yaml")
        config = load_yaml_file(config_file)
        # When you don't specify anything in the settings, instead of
        # returning an empty dictionary, pyyaml returns None
        config["settings"] = config["settings"] or []
        config["daytime_settings"] = config["daytime_settings"] or []

        if isinstance(config["settings"], dict) or isinstance(config["daytime_settings"], dict) or \
            (config["settings"] and len(config["settings"][0]) != 2) or \
            (config["daytime_settings"] and len(config["daytime_settings"][0]) != 2):
            self.error("Malformed config for plugin", plugin_id, "- did you miss '!!omap' ?")

        settings = collections.OrderedDict(
            [(k, make_setting_from_data(v)) for k, v in config["settings"]])
        self.settings[plugin_id] = settings

        if self.requires_daytime_settings:
            daysettings = collections.OrderedDict(
                [(k, make_daysetting_from_data(v)) for k, v in config["daytime_settings"]])
            self.day_settings[plugin_id] = daysettings
Example #39
 def _find_basepath(self):
     """ Attempts to find the pipeline base path by looking at the location
     of this file """
     pth = os.path.abspath(join(os.path.dirname(os.path.realpath(__file__)), ".."))
     return Filename.from_os_specific(pth).get_fullpath()
Example #40
# This file includes all modules from the native module.

from __future__ import print_function
import sys
from os.path import dirname, realpath

from direct.stdpy.file import join, isfile
from rpcore.rpobject import RPObject

# Store a global flag, indicating whether the C++ modules were loaded or the Python
# implementation of them
NATIVE_CXX_LOADED = False

# Read the configuration from the flag-file
current_path = dirname(realpath(__file__))
cxx_flag_path = join(current_path, "use_cxx.flag")
if not isfile(cxx_flag_path):
    RPObject.global_error("CORE", "Could not find cxx flag, please run the setup.py!")
    sys.exit(1)
else:
    with open(join(current_path, "use_cxx.flag"), "r") as handle:
        NATIVE_CXX_LOADED = handle.read().strip() == "1"

# The native module should only be imported once, and that by the internal pipeline code
assert __package__ == "rpcore.native", "You have included the pipeline in the wrong way!"

# Classes which should get imported
classes_to_import = [
    "GPUCommand",
    "GPUCommandList",
    "ShadowManager",
Example #41
 def get_shader_resource(self, pth):
     """ Converts a local path from the plugins shader directory into
     an absolute path """
     return join(self.base_path, "shader", pth)
Example #42
def exportTile(dstDir,name,tile):
    tile.writeBamFile(join(dstDir,name+".bam"))
Example #43
        if not isdir(self.writePath):
            self.debug("Creating temp path, as it does not exist yet")
            try:
                os.makedirs(self.writePath, 0o777)
            except Exception as msg:
                self.error("Failed to create temp path:", msg)
                import sys
                sys.exit(0)

        # Mount the pipeline temp path
        self.debug("Mounting", self.writePath, "as PipelineTemp/")
        vfs.mountLoop(self.writePath, 'PipelineTemp/', 0)

        # #pragma include "something" searches in current directory first, 
        # and then on the model-path. Append the Shader directory to the modelpath
        # to ensure the shader includes can be found.
        base_path = Filename(self.basePath)
        getModelPath().appendDirectory(join(base_path.getFullpath(), 'Shader'))

        # Add the pipeline root directory to the model path as well
        getModelPath().appendDirectory(base_path.getFullpath())

        # Append the write path to the model directory to make pragma include 
        # find the ShaderAutoConfig.include
        write_path = Filename(self.writePath)
        getModelPath().appendDirectory(write_path.getFullpath())

    def unmount(self):
        """ Unmounts the VFS """
        raise NotImplementedError()
Example #44
    "", """
win-title Voxelizer - Show Voxels
sync-video #f
notify-level-pnmimage error
show-buffers #f
win-size 800 600
texture-cache #f
model-cache 
model-cache-dir 
model-cache-textures #f 
multisamples 16
""")

import direct.directbase.DirectStart

resultFile = join(scenePath, "voxels.png")
configFile = join(scenePath, "voxels.ini")
resultEgg = eggPath

if showOriginalModel:
    print("Loading model from", resultEgg)
    model = loader.loadModel(resultEgg)
    model.flattenStrong()
    model.reparentTo(render)

print("Loading Voxel Grid from", resultFile)
tex = loader.loadTexture(resultFile)

# Load config file
with open(configFile, "r") as handle:
    configContent = handle.readlines()
Example #45
cwd = getcwd().replace("\\", "/").rstrip("/")

ignoreFiles = []


def checkIgnore(source):
    for f in ignoreFiles:
        if f.lower() in source.lower():
            return False
    return True


allSources = [
    i for i in listdir("Source")
    if isfile(join("Source", i)) and checkIgnore(i) and i.endswith(".h")
]

allSourcesStr = ' '.join(['"' + i + '"' for i in allSources])

print("\nRunning interrogate ..")

command = PANDA_BIN + \
    "/interrogate -D__inline -DCPPPARSER -DP3_INTERROGATE=1 -D__cplusplus "
command += "-fnames -string -refcount -assert "
command += "-Dvolatile= "
command += "-DWIN32= "
command += "-DWIN32_VC= "
command += "-D_WINDOWS= "
command += "-S" + PANDA_INCLUDE + "/parser-inc "
command += "-S" + PANDA_INCLUDE + "/ "
Example #46
    def mount(self):
        """ Inits the VFS Mounts """

        self.debug("Setting up virtual filesystem.")
        self._mounted = True
        vfs = VirtualFileSystem.get_global_ptr()

        # Mount data and models
        dirs_to_mount = ["Data", "Effects", "Plugins", "Shader"]
        for directory in dirs_to_mount:
            vfs.mount_loop(join(self._base_path, directory), directory, 0)

        if isdir(join(self._base_path, "Models")):
            vfs.mount_loop(join(self._base_path, 'Models'), 'Models', 0)

        # Mount config dir
        if self._config_dir is None:
            config_dir = join(self._base_path, "Config/")
            vfs.mount_loop(config_dir, "$$Config/", 0)
            self.debug("Auto-Detected config dir:", config_dir)
        else:
            vfs.mount_loop(self._config_dir, "$$Config/", 0)
            self.debug("Config dir:", self._config_dir)


        # Convert the base path to something the os can work with
        sys_base_path = Filename(self._base_path).to_os_specific()

        # Add plugin folder to the include path
        sys.path.insert(0, join(sys_base_path, 'Plugins'))

        # Add current folder to the include path
        sys.path.insert(0, sys_base_path)

        # Mount the pipeline temp path:
        # If no write path is specified, use a virtual ramdisk
        if self._write_path is None:
            self.debug("Mounting ramdisk as $$PipelineTemp/")
            vfs.mount(VirtualFileMountRamdisk(), "$$PipelineTemp/", 0)
        else:
            # In case an actual write path is specified:
            # Ensure the pipeline write path exists, and if not, create it
            if not isdir(self._write_path):
                self.debug("Creating temp path, it does not exist yet")
                try:
                    os.makedirs(self._write_path)
                except IOError as msg:
                    self.fatal("Failed to create temp path:", msg)
            self.debug("Mounting", self._write_path, "as $$PipelineTemp/")
            vfs.mount_loop(self._write_path, '$$PipelineTemp/', 0)

        # #pragma include "something" searches in current directory first,
        # and then on the model-path. Append the Shader directory to the
        # modelpath to ensure the shader includes can be found.
        self._model_paths.append(join(self._base_path, "Shader"))

        # Add the pipeline root directory to the model path as well
        self._model_paths.append(self._base_path)
        self._model_paths.append(".")

        # Append the write path to the model directory to make pragma include
        # find the ShaderAutoConfig.include
        self._model_paths.append("$$PipelineTemp")

        # Add the plugins dir to the model path so plugins can include their
        # own resources more easily
        self._model_paths.append(join(self._base_path, "Plugins"))

        # Write the model paths to the global model path
        for pth in self._model_paths:
            get_model_path().append_directory(pth)
Example #47
def exportTile(dstDir,name,tile):
    tile.writeBamFile(join(dstDir,name+".bam"))
Example #48
PANDA_INCLUDE = sys.argv[3]



cwd = getcwd().replace("\\", "/").rstrip("/")

ignoreFiles = []

def checkIgnore(source):
    for f in ignoreFiles:
        if f.lower() in source.lower():
            return False
    return True


allSources = [i for i in listdir("Source") if isfile(join("Source", i)) and checkIgnore(i) and i.endswith(".h") ]


allSourcesStr = ' '.join(['"' + i + '"' for i in allSources])

print("\nRunning interrogate ..")

command = PANDA_BIN + \
    "/interrogate -D__inline -DCPPPARSER -DP3_INTERROGATE=1 -D__cplusplus "
command += "-fnames -string -refcount -assert "
command += "-Dvolatile= "
command += "-S" + PANDA_INCLUDE + "/parser-inc "
command += "-S" + PANDA_INCLUDE + "/ "

# command += "-I" + PANDA_BUILT + "/bin/include/ "
command += "-oc Source/InterrogateModule.cxx "
Example #49
 def get_shader_resource(self, pth):
     """ Converts a local path from the plugins shader directory into
     an absolute path """
     return join(self.base_path, "shader", pth)
Example #50
 def _find_basepath(self):
     """ Attempts to find the pipeline base path by looking at the location
     of this file """
     pth = os.path.abspath(join(os.path.dirname(os.path.realpath(__file__)), "../.."))
     return Filename.from_os_specific(pth).get_fullpath()
Example #51
            try:
                os.makedirs(self.writePath, 0o777)
            except Exception as msg:
                self.error("Failed to create temp path:", msg)
                import sys
                sys.exit(0)

        # Mount the pipeline temp path
        self.debug("Mounting", self.writePath, "as PipelineTemp/")
        vfs.mountLoop(self.writePath, 'PipelineTemp/', 0)

        # #pragma include "something" searches in current directory first,
        # and then on the model-path. Append the Shader directory to the modelpath
        # to ensure the shader includes can be found.
        base_path = Filename(self.basePath)
        self.modelPaths.append(join(base_path.getFullpath(), 'Shader'))

        # Add the pipeline root directory to the model path as well
        self.modelPaths.append(base_path.getFullpath())

        # Append the write path to the model directory to make pragma include
        # find the ShaderAutoConfig.include
        write_path = Filename(self.writePath)
        self.modelPaths.append(write_path.getFullpath())

        for pth in self.modelPaths:
            getModelPath().appendDirectory(pth)

    def unmount(self):
        """ Unmounts the VFS """
        raise NotImplementedError()
Example #52
    def _handleIncludes(self, source):
        """ Internal (recursive) method to parse #include's """

        with open(source, "r") as handle:
            content = handle.readlines()

        newContent = ""
        includeIdentifier = "#include "

        ID = self._ShaderIDs.get(source, None)
        if ID is None:
            ID = self._NextID
            self._ShaderIDs[source] = ID
            # print ID, source
            self._NextID += 1

        newContent += "#line 1 %d\n" % (ID)

        # Iterate through lines
        for line_idx, line in enumerate(content):
            lineStrip = line.strip()
            if lineStrip.startswith(includeIdentifier):
                includePart = lineStrip[len(includeIdentifier):].strip()

                # Filename is surrounded by braces
                if includePart.startswith('"') and includePart.endswith('"'):

                    # Special case
                    if includePart == '"%ShaderAutoConfig%"':
                        properIncludePart = "PipelineTemp/ShaderAutoConfig.include"
                    else:
                        # Extract include part
                        properIncludePart = Filename.fromOsSpecific(join(
                            self._GlobalShaderPath, includePart[1:-1])).toOsGeneric()

                    # And check if file exists
                    if isfile(properIncludePart):

                        # Check for recursive includes
                        if properIncludePart in self._GlobalIncludeStack:
                            # print "BetterShader: Ignoring recursive
                            # include:",properIncludePart
                            pass

                        else:
                            self._GlobalIncludeStack.append(properIncludePart)
                            newContent += "\n// FILE: '" + \
                                str(properIncludePart) + "' \n"

                            newContent += self._handleIncludes(
                                properIncludePart).strip() + "\n"

                            newContent += "#line %d %d\n" % (line_idx + 3, ID)

                    else:
                        print("BetterShader: Failed to load '" + str(properIncludePart) + "'!")
                else:
                    print("BetterShader: Invalid include:", includePart)

                continue

            newContent += line.rstrip() + "\n"

        return newContent
Example #53
    def setup(self):
        """ Sets up everything for the GI to work """
        self.debug("Setup ..")

        if self.pipeline.settings.useHardwarePCF:
            self.error(
                "Global Illumination does not work in combination with PCF!")
            import sys
            sys.exit(0)
            return

        

        self.settings = VoxelSettingsManager()
        self.settings.loadFromFile(join(self.sceneRoot, "voxels.ini"))

        self.debug(
            "Loaded voxels, grid resolution is", self.settings.GridResolution)

        self.gridScale = self.settings.GridEnd - self.settings.GridStart
        self.voxelSize = self.gridScale / float(self.settings.GridResolution)
        self.entrySize = Vec2(
            1.0 / float(self.settings.StackSizeX), 1.0 / float(self.settings.StackSizeY))
        self.frameIndex = 0

        invVoxelSize = Vec3(
            1.0 / self.voxelSize.x, 1.0 / self.voxelSize.y, 1.0 / self.voxelSize.z)
        invVoxelSize.normalize()
        self.normalizationFactor = invVoxelSize / float(self.settings.GridResolution)

        # Debugging of voxels, VERY slow
        self.debugVoxels = False

        if self.debugVoxels:
            self.createVoxelDebugBox()

        # Load packed voxels
        packedVoxels = Globals.loader.loadTexture(
            join(self.sceneRoot, "voxels.png"))
        packedVoxels.setFormat(Texture.FRgba8)
        packedVoxels.setComponentType(Texture.TUnsignedByte)
        # packedVoxels.setKeepRamImage(False)

        # Create 3D Texture to store unpacked voxels
        self.unpackedVoxels = Texture("Unpacked voxels")
        self.unpackedVoxels.setup3dTexture(self.settings.GridResolution, self.settings.GridResolution, self.settings.GridResolution,
                                           Texture.TFloat, Texture.FRgba8)
        self.unpackedVoxels.setMinfilter(Texture.FTLinearMipmapLinear)
        self.unpackedVoxels.setMagfilter(Texture.FTLinear)

        self.unpackVoxels = NodePath("unpackVoxels")
        self.unpackVoxels.setShader(
            BetterShader.loadCompute("Shader/GI/UnpackVoxels.compute"))
        self.unpackVoxels.setShaderInput("packedVoxels", packedVoxels)
        self.unpackVoxels.setShaderInput(
            "stackSizeX", LVecBase3i(self.settings.StackSizeX))
        self.unpackVoxels.setShaderInput("gridSize", LVecBase3i(self.settings.GridResolution))
        self.unpackVoxels.setShaderInput("destination", self.unpackedVoxels)
        self._executeShader(
            self.unpackVoxels, self.settings.GridResolution / 8, self.settings.GridResolution / 8, self.settings.GridResolution / 8)

        # Create 3D Texture to store direct radiance
        self.directRadiance = Texture("Direct radiance")
        self.directRadiance.setup3dTexture(self.settings.GridResolution, self.settings.GridResolution, self.settings.GridResolution,
                                           Texture.TFloat, Texture.FRgba8)

        for prepare in [self.directRadiance, self.unpackedVoxels]:
            prepare.setMagfilter(Texture.FTLinear)
            prepare.setMinfilter(Texture.FTLinearMipmapLinear)
            prepare.setWrapU(Texture.WMBorderColor)
            prepare.setWrapV(Texture.WMBorderColor)
            prepare.setWrapW(Texture.WMBorderColor)
            prepare.setBorderColor(Vec4(0))

        self.populateVPLNode = NodePath("PopulateVPLs")
        self.clearTextureNode = NodePath("ClearTexture")
        self.copyTextureNode = NodePath("CopyTexture")
        self.generateMipmapsNode = NodePath("GenerateMipmaps")

        # surroundingBox = Globals.loader.loadModel(
        #     "Demoscene.ignore/CubeOpen/Model.egg")
        # surroundingBox.setPos(self.gridStart)
        # surroundingBox.setScale(self.gridScale)
        # surroundingBox.reparentTo(Globals.render)

        self.bindTo(self.populateVPLNode, "giData")
        self.reloadShader()

        self._generateMipmaps(self.unpackedVoxels)
Example #54
    def mount(self):
        """ Inits the VFS Mounts. This creates the following virtual directory
        structure, from which all files can be located:

        /$$rp/  (Mounted from the render pipeline base directory)
           + rpcore/
           + shader/
           + ...

        /$$rpconfig/ (Mounted from config/, may be set by user)
           + pipeline.yaml
           + ...

        /$$rptemp/ (Either ramdisk or user specified)
            + day_time_config
            + shader_auto_config
            + ...

        /$$rpshader/ (Link to /$$rp/rpcore/shader)

         """
        self.debug("Setting up virtual filesystem")
        self._mounted = True

        def convert_path(pth):
            return Filename.from_os_specific(pth).get_fullpath()
        vfs = VirtualFileSystem.get_global_ptr()

        # Mount config dir as $$rpconfig
        if self._config_dir is None:
            config_dir = convert_path(join(self._base_path, "config/"))
            self.debug("Mounting auto-detected config dir:", config_dir)
            vfs.mount(config_dir, "/$$rpconfig", 0)
        else:
            self.debug("Mounting custom config dir:", self._config_dir)
            vfs.mount(convert_path(self._config_dir), "/$$rpconfig", 0)

        # Mount directory structure
        vfs.mount(convert_path(self._base_path), "/$$rp", 0)
        vfs.mount(convert_path(join(self._base_path, "rpcore/shader")), "/$$rp/shader", 0)
        vfs.mount(convert_path(join(self._base_path, "effects")), "effects", 0)

        # Mount the pipeline temp path:
        # If no write path is specified, use a virtual ramdisk
        if self._write_path is None:
            self.debug("Mounting ramdisk as /$$rptemp")
            vfs.mount(VirtualFileMountRamdisk(), "/$$rptemp", 0)
        else:
            # In case an actual write path is specified:
            # Ensure the pipeline write path exists, and if not, create it
            if not isdir(self._write_path):
                self.debug("Creating temporary path, since it does not exist yet")
                try:
                    os.makedirs(self._write_path)
                except IOError as msg:
                    self.fatal("Failed to create temporary path:", msg)
            self.debug("Mounting", self._write_path, "as /$$rptemp")
            vfs.mount(convert_path(self._write_path), '/$$rptemp', 0)

        get_model_path().prepend_directory("/$$rp")
        get_model_path().prepend_directory("/$$rp/shader")
        get_model_path().prepend_directory("/$$rptemp")
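Once this mount() has run, pipeline files are addressed through the virtual prefixes instead of their on-disk locations. A hedged sketch (the file names below are illustrative; only the mount points and pipeline.yaml come from the code and docstring above):

from direct.stdpy.file import join, isfile

config_path = "/$$rpconfig/pipeline.yaml"
shader_path = join("/$$rp/shader", "includes/some_include.inc.glsl")  # hypothetical file
temp_path = join("/$$rptemp", "$$generated_shader.glsl")              # hypothetical temp file

for pth in (config_path, shader_path, temp_path):
    print(pth, "exists:", isfile(pth))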
Example #55
    def __init__(self):
        load_prc_file_data(
            "", """
            textures-power-2 none
            window-type offscreen
            win-size 100 100
            gl-coordinate-system default
            notify-level-display error
            print-pipe-types #f
        """)

        ShowBase.__init__(self)

        base_path = realpath(dirname(__file__))
        os.chdir(base_path)
        filter_dir = join(base_path, "tmp/")
        if isdir(filter_dir):
            shutil.rmtree(filter_dir)
        os.makedirs(filter_dir)

        source_path = join(base_path, "source")
        extension = ".jpg"
        if isfile(join(source_path, "1.png")):
            extension = ".png"

        cubemap = self.loader.loadCubeMap(
            Filename.from_os_specific(join(source_path, "#" + extension)))
        mipmap, size = -1, 1024

        cshader = Shader.load_compute(Shader.SL_GLSL, "filter.compute.glsl")

        while size > 1:
            size = size // 2
            mipmap += 1
            print("Filtering mipmap", mipmap)

            dest_cubemap = Texture("Dest")
            dest_cubemap.setup_cube_map(size, Texture.T_float,
                                        Texture.F_rgba16)
            node = NodePath("")

            for i in range(6):
                node.set_shader(cshader)
                node.set_shader_input("SourceTex", cubemap)
                node.set_shader_input("DestTex", dest_cubemap)
                node.set_shader_input("currentSize", size)
                node.set_shader_input("currentMip", mipmap)
                node.set_shader_input("currentFace", i)
                attr = node.get_attrib(ShaderAttrib)
                self.graphicsEngine.dispatch_compute(
                    ((size + 15) // 16, (size + 15) // 16, 1), attr,
                    self.win.gsg)

            print(" Extracting data ..")

            self.graphicsEngine.extract_texture_data(dest_cubemap,
                                                     self.win.gsg)

            print(" Writing data ..")
            dest_cubemap.write(join(filter_dir, "{}-#.png".format(mipmap)), 0,
                               0, True, False)

        print("Reading data back in ..")
        tex = self.loader.loadCubeMap(Filename.from_os_specific(
            join(base_path, "tmp/#-#.png")),
                                      readMipmaps="True")

        print("Writing txo ..")
        tex.write("cubemap.txo.pz")

        shutil.rmtree(join(base_path, "tmp"))