def lookupPath(self, realPath):
    """
    Returns the _files lookup path for specified real path.
    """
    if canonicalPath(realPath) == canonicalPath(self.dataPath):
        return ''
    return os.path.normpath(os.path.relpath(realPath, self.dataPath)).replace('\\', '/')
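# --- Usage sketch (not part of the library): a standalone illustration of the lookup
# key produced by lookupPath() above. The example paths are hypothetical, and
# canonicalPath() is only approximated here with realpath/normpath/normcase.
import os

def _lookup_key(real_path, data_path):
    def _canon(p):
        return os.path.normcase(os.path.normpath(os.path.realpath(p)))
    if _canon(real_path) == _canon(data_path):
        return ''   # the data root itself maps to the empty string
    return os.path.normpath(os.path.relpath(real_path, data_path)).replace('\\', '/')

# e.g. _lookup_key('/home/user/mh/data/clothes/shirt.mhclo', '/home/user/mh/data')
# returns 'clothes/shirt.mhclo' (forward slashes on every platform)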
def setUVMap(self, uvMap):
    self._uvMap = getFilePath(uvMap, self.filepath)
    from getpath import getSysDataPath, canonicalPath
    if self._uvMap and \
       canonicalPath(self._uvMap) == canonicalPath(getSysDataPath('uvs/default.mhuv')):
        # uvs/default.mhuv is a meta-file that refers to the default uv set
        self._uvMap = None
def _getProxyIndex(self, mhcloFile):
    """
    Get the index of specified mhclo path within the list returned by getSelection().
    Returns None if the proxy of specified path is not in the selection.
    """
    for pIdx, p in enumerate(self.getSelection()):
        if getpath.canonicalPath(p.file) == getpath.canonicalPath(mhcloFile):
            return pIdx
    return None
def getPreview(self, filename):
    # TODO this makes filechooser loading quite slow for materials without a thumbnail, but it does provide a preview
    thumb = super(MhmatFileLoader, self).getPreview(filename)
    if getpath.canonicalPath(thumb) == getpath.canonicalPath(self.fileChooser.notFoundImage):
        import material
        mat = material.fromFile(filename)
        if mat.diffuseTexture:
            return mat.diffuseTexture
    return thumb
def _getProxyIndex(self, mhcloFile):
    """
    Get the index of specified mhclo path within the list returned by getSelection().
    Returns None if the proxy of specified path is not in the selection.
    """
    for pIdx, p in enumerate(self.getSelection()):
        if getpath.canonicalPath(p.file) == getpath.canonicalPath(mhcloFile):
            return pIdx
    return None
def setValue(self, value, skipDependencies=False):
    value = self.clampValue(value)
    if value != 0:
        self.compileTargetIfNecessary()
    self.human.setDetail(canonicalPath(self.fullName), value)
def getMetadata(self, filename):
    """Retrieves the metadata of a specified file.
    Updates the cache if needed.
    """
    if self._filecache is None:
        # Init cache
        self.loadCache()
        self.updateFileCache(self.getSearchPaths(), self.getFileExtensions(), False)

    fileId = getpath.canonicalPath(filename)
    if fileId not in self._filecache._cache:
        # Lazily update cache
        self.updateFileCache(self.getSearchPaths() + [os.path.dirname(fileId)], self.getFileExtensions(), False)

    if fileId in self._filecache:
        metadata = self._filecache[fileId]
        if metadata is not None:
            mtime = metadata[0]
            metadata = metadata[1:]
            if mtime < os.path.getmtime(self.getMetadataFile(fileId)):
                # Queried file was updated, update stale cache
                self.updateFileCache(self.getSearchPaths() + [os.path.dirname(fileId)], self.getFileExtensions(), False)
                metadata = self._filecache[fileId]
                mtime = metadata[0]
                metadata = metadata[1:]
        return metadata
    else:
        log.warning('Could not get metadata for file %s. Does not exist in cache.', filename)
        return None
def findProxyMetadataByFilename(self, path):
    """
    Retrieve proxy metadata by canonical path from metadata cache.
    Returns None or metadata in the form: (mtime, uuid, tags)
    """
    proxyId = getpath.canonicalPath(path)

    if self._proxyFileCache is None:
        self.loadProxyFileCache()
    if self._proxyFilePerUuid is None:
        self._loadUuidLookup()

    if proxyId not in self._proxyFileCache:
        # Try once more, but update the metadata cache first (lazy cache for performance reasons)
        self.updateProxyFileCache()
        self._loadUuidLookup()

    if proxyId not in self._proxyFileCache:
        log.warning('Could not get metadata for proxy with filename %s. Does not exist in %s library.', proxyId, self.proxyName)
        return None

    metadata = self._proxyFileCache[proxyId]
    mtime = metadata[0]
    if mtime < os.path.getmtime(proxyId):
        # Queried file was updated, update stale cache
        self.updateProxyFileCache()
        self._loadUuidLookup()
        metadata = self._proxyFileCache[proxyId]
    return metadata
def findProxyMetadataByFilename(self, path):
    """
    Retrieve proxy metadata by canonical path from metadata cache.
    Returns None or metadata in the form: (mtime, uuid, tags)
    """
    proxyId = getpath.canonicalPath(path)

    if self._filecache is None:
        # Init cache
        self.loadCache()
        self.updateFileCache(self.getSearchPaths(), self.getFileExtensions(), True)
    if self._proxyFilePerUuid is None:
        self._loadUuidLookup()

    if proxyId not in self._filecache:
        # Try once more, but update the metadata cache first (lazy cache for performance reasons)
        self.updateFileCache(self.getSearchPaths(), self.getFileExtensions(), True)
        self._loadUuidLookup()

    if proxyId not in self._filecache:
        log.warning('Could not get metadata for proxy with filename %s. Does not exist in %s library.', proxyId, self.proxyName)
        return None

    metadata = self._filecache[proxyId]
    mtime = metadata[0]
    if mtime < os.path.getmtime(proxyId):
        # Queried file was updated, update stale cache
        self.updateFileCache(self.getSearchPaths(), self.getFileExtensions(), True)
        self._loadUuidLookup()
        metadata = self._filecache[proxyId]
    return metadata
def selectProxy(self, mhclofile):
    """
    Called when a new proxy has been selected.
    If this library selects only a single proxy, specifying None as the
    mhclofile parameter will deselect the current proxy and set the selection
    to "none".
    If this library allows selecting multiple proxies, specifying None as
    mhclofile will have no effect.
    """
    if not mhclofile:
        if self.multiProxy:
            return
        else:
            self.deselectProxy(None)
            return

    log.message('Selecting proxy file "%s" from %s library.', mhclofile, self.proxyName)
    human = self.human

    pxy = None
    mhcloId = getpath.canonicalPath(mhclofile)
    if mhcloId in self._proxyCache:
        pxy = self._proxyCache[mhcloId]
        if pxy.mtime < os.path.getmtime(mhclofile):
            pxy = None

    if not pxy:
        pxy = proxy.loadProxy(human, mhclofile, type=self.proxyName.capitalize())
        self._proxyCache[mhcloId] = pxy

    if pxy.uuid in [p.uuid for p in self.getSelection() if p is not None]:
        log.debug("Proxy with UUID %s (%s) already loaded in %s library. Skipping.", pxy.uuid, pxy.file, self.proxyName)
        return

    if not self.multiProxy and self.isProxySelected():
        # Deselect previously selected proxy
        self.deselectProxy(None, suppressSignal=True)

    mesh, obj = pxy.loadMeshAndObject(human)
    if not mesh:
        return
    mesh.setPickable(True)  # Allow mouse picking for proxies attached to human

    gui3d.app.addObject(obj)

    self.filechooser.selectItem(mhclofile)

    self.adaptProxyToHuman(pxy, obj)
    obj.setSubdivided(human.isSubdivided())  # Copy subdivided state of human

    # Add to selection
    self.selectedProxies.append(pxy)
    self.filechooser.selectItem(mhclofile)

    self.proxySelected(pxy)

    self.signalChange()
def setValue(self, value, skipDependencies=False):
    value = self.clampValue(value)
    if value != 0:
        self.compileTargetIfNecessary()
    self.human.setDetail(canonicalPath(self.fullName), value)
def refreshCachedTarget(targetPath):
    """
    Invalidate the cache for the specified target, so that it will be reloaded
    next time it is requested.
    Generally this only has effect if the target was loaded from an ascii file,
    not from an npz archive.
    """
    targetPath = canonicalPath(targetPath)
    if targetPath in _targetBuffer:
        del _targetBuffer[targetPath]
def abspath(path):
    """
    Helper function to determine the canonical path if a valid (not None)
    pathname is specified. Canonical pathnames are used for reliable
    comparison of two paths.
    """
    if path:
        return getpath.canonicalPath(path)
    else:
        return None
def compileTargetIfNecessary(self):
    # TODO find out when compile is needed
    #if alreadyCompiled:
    #    return
    target = self.compileWarpTarget()
    algos3d._targetBuffer[canonicalPath(self.fullName)] = target   # TODO remove direct use of the target buffer?
    self.human.hasWarpTargets = True
    if debug:
        log.debug("DONE %s" % target)
def getRelativeMaterialPath(self, filepath, objFile=None):
    """
    Produce a portable path for writing to file.
    """
    # TODO move as helper func to material module
    if objFile:
        objFile = getpath.canonicalPath(objFile)
        if os.path.isfile(objFile):
            objFile = os.path.dirname(objFile)
        searchPaths = [objFile]
    else:
        searchPaths = []
    return getpath.getJailedPath(filepath, searchPaths)
def fromFile(self, filepath, mesh=None):
    """
    Load skeleton from json rig file.
    """
    import json
    from collections import OrderedDict
    import getpath
    import os
    self._clear()
    skelData = json.load(open(filepath, 'rb'), object_pairs_hook=OrderedDict)

    self.name = skelData.get("name", self.name)
    self.version = int(skelData.get("version", 1))
    self.copyright = skelData.get("copyright", "")
    self.description = skelData.get("description", "")
    self.plane_map_strategy = int(skelData.get("plane_map_strategy", 3))

    for joint_name, v_idxs in skelData.get("joints", dict()).items():
        if isinstance(v_idxs, list) and len(v_idxs) > 0:
            self.joint_pos_idxs[joint_name] = v_idxs

    self.planes = skelData.get("planes", dict())

    # Order bones breadth-first
    breadthfirst_bones = []
    prev_len = -1   # anti-deadlock
    while len(breadthfirst_bones) != len(skelData["bones"]) and prev_len != len(breadthfirst_bones):
        prev_len = len(breadthfirst_bones)
        for bone_name, bone_defs in skelData["bones"].items():
            if bone_name not in breadthfirst_bones:
                if not bone_defs.get("parent", None):
                    breadthfirst_bones.append(bone_name)
                elif bone_defs["parent"] in breadthfirst_bones:
                    breadthfirst_bones.append(bone_name)
    if len(breadthfirst_bones) != len(skelData["bones"]):
        missing = [bname for bname in skelData["bones"].keys() if bname not in breadthfirst_bones]
        log.warning("Some bones defined in file %s could not be added to skeleton %s, because they have an invalid parent bone (%s)", filepath, self.name, ', '.join(missing))

    for bone_name in breadthfirst_bones:
        bone_defs = skelData["bones"][bone_name]
        self.addBone(bone_name, bone_defs.get("parent", None), bone_defs["head"], bone_defs["tail"], bone_defs.get("rotation_plane", 0), bone_defs.get("reference", None), bone_defs.get("weights_reference", None))

    self.build()

    if "weights_file" in skelData and skelData["weights_file"]:
        weights_file = skelData["weights_file"]
        weights_file = getpath.thoroughFindFile(weights_file, os.path.dirname(getpath.canonicalPath(filepath)), True)
        self.vertexWeights = VertexBoneWeights.fromFile(weights_file, mesh.getVertexCount() if mesh else None, rootBone=self.roots[0].name)
def traceStack(self, all=True):
    import warpmodifier
    log.debug("human.targetsDetailStack:")
    for path, value in self.targetsDetailStack.items():
        try:
            target = algos3d._targetBuffer[canonicalPath(path)]
        except KeyError:
            target = None
        if target is None:
            stars = " ??? "
        elif isinstance(target, warpmodifier.WarpTarget):
            stars = " *** "
        else:
            stars = " "
        if all or path[0:4] != "data":
            log.debug(" %s%s: %s" % (stars, path, value))
def traceStack(self, all=True):
    import warpmodifier
    log.debug("human.targetsDetailStack:")
    for path, value in self.targetsDetailStack.items():
        try:
            target = algos3d._targetBuffer[canonicalPath(path)]
        except KeyError:
            target = None
        if target is None:
            stars = " ??? "
        elif isinstance(target, warpmodifier.WarpTarget):
            stars = " *** "
        else:
            stars = " "
        if all or path[0:4] != "data":
            log.debug(" %s%s: %s" % (stars, path, value))
def getTags(self, filename=None):
    def _getMaterialTags(filename):
        return material.peekMetadata(filename)

    if self._matFileCache is None:
        # Init cache
        self.loadCache()
        self._matFileCache = filecache.updateFileCache(self.materials, 'mhmat', _getMaterialTags, self._matFileCache, False)

    result = set()

    # TODO move most of this (duplicated) logic inside a class in filecache
    if filename:
        fileId = getpath.canonicalPath(filename)
        if fileId not in self._matFileCache:
            # Lazily update cache
            self._matFileCache = filecache.updateFileCache(self.materials + [os.path.dirname(fileId)], 'mhmat', _getMaterialTags, self._matFileCache, False)
        if fileId in self._matFileCache:
            metadata = self._matFileCache[fileId]
            if metadata is not None:
                mtime, name, tags = metadata
                if mtime < os.path.getmtime(fileId):
                    # Queried file was updated, update stale cache
                    self._matFileCache = filecache.updateFileCache(self.materials + [os.path.dirname(fileId)], 'mhmat', _getMaterialTags, self._matFileCache, False)
                    metadata = self._matFileCache[fileId]
                    mtime, name, tags = metadata
                result = result.union(tags)
        else:
            log.warning('Could not get tags for material file %s. Does not exist in Material library.', filename)
        return result
    else:
        for (path, values) in self._matFileCache.items():
            _, name, tags = values
            result = result.union(tags)
        return result
def fromFile(self, filepath, mesh=None):
    """
    Load skeleton from json rig file.
    """
    import json
    from collections import OrderedDict
    import getpath
    import os
    self._clear()
    skelData = json.load(open(filepath, 'rb'), object_pairs_hook=OrderedDict)

    self.name = skelData.get("name", self.name)
    self.version = int(skelData.get("version", 1))
    self.copyright = skelData.get("copyright", "")
    self.description = skelData.get("description", "")

    for joint_name, v_idxs in skelData.get("joints", dict()).items():
        if isinstance(v_idxs, list) and len(v_idxs) > 0:
            self.joint_pos_idxs[joint_name] = v_idxs

    # Order bones breadth-first
    breadthfirst_bones = []
    prev_len = -1   # anti-deadlock
    while len(breadthfirst_bones) != len(skelData["bones"]) and prev_len != len(breadthfirst_bones):
        prev_len = len(breadthfirst_bones)
        for bone_name, bone_defs in skelData["bones"].items():
            if bone_name not in breadthfirst_bones:
                if not bone_defs.get("parent", None):
                    breadthfirst_bones.append(bone_name)
                elif bone_defs["parent"] in breadthfirst_bones:
                    breadthfirst_bones.append(bone_name)
    if len(breadthfirst_bones) != len(skelData["bones"]):
        missing = [bname for bname in skelData["bones"].keys() if bname not in breadthfirst_bones]
        log.warning("Some bones defined in file %s could not be added to skeleton %s, because they have an invalid parent bone (%s)", filepath, self.name, ', '.join(missing))

    for bone_name in breadthfirst_bones:
        bone_defs = skelData["bones"][bone_name]
        self.addBone(bone_name, bone_defs.get("parent", None), bone_defs["head"], bone_defs["tail"], bone_defs["roll"], bone_defs.get("reference", None), bone_defs.get("weights_reference", None))

    self.build()

    if "weights_file" in skelData and skelData["weights_file"]:
        weights_file = skelData["weights_file"]
        weights_file = getpath.thoroughFindFile(weights_file, os.path.dirname(getpath.canonicalPath(filepath)), True)
        self.vertexWeights = VertexBoneWeights.fromFile(weights_file, mesh.getVertexCount() if mesh else None, rootBone=self.roots[0].name)
def getTarget(obj, targetPath):
    """
    This function retrieves a set of translation vectors from a morphing
    target file and stores them in a buffer. It is usually only called if
    the translation vectors from this file have not yet been buffered during
    the current session.

    The translation target files contain lists of vertex indices and
    corresponding 3D translation vectors. The buffer is structured as a
    list of lists (a dictionary of dictionaries) indexed using the morph
    target file name, so: "targetBuffer[targetPath] = targetData" and
    targetData is a list of vectors keyed on their vertex indices.

    For example, a translation direction vector of [0, 5.67, 2.34] for
    vertex 345 would be stored using "targetData[345] = [0, 5.67, 2.34]".
    If this is taken from target file "foo.target", then this targetData
    could be assigned to the buffer with
    'targetBuffer["c:/MH/foo.target"] = targetData'.

    Parameters
    ----------

    obj:
        *3d object*. The target object to which the translations are to be
        applied. This object is read by this function to define a list of
        the vertices affected by this morph target file.

    targetPath:
        *string*. The file system path to the file containing the morphing
        targets. The precise format of this string will be operating system
        dependent.
    """
    targetPath = canonicalPath(targetPath)

    try:
        return _targetBuffer[targetPath]
    except KeyError:
        pass
    target = Target(obj, targetPath)
    _targetBuffer[targetPath] = target
    return target
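# --- Usage sketch (not part of the library): the buffering in getTarget() above is
# plain memoization keyed on the canonical path, so different spellings of the same
# file map onto one cache entry. Below is a minimal standalone version of the same
# pattern, with a caller-supplied loader standing in for the real Target class.
import os

_target_buffer = {}

def _canonical(path):
    # Stand-in for getpath.canonicalPath(): absolute, normalized path.
    return os.path.normcase(os.path.normpath(os.path.realpath(path)))

def get_cached(path, load):
    """Return the cached value for path, loading and caching it on first use."""
    key = _canonical(path)
    try:
        return _target_buffer[key]
    except KeyError:
        pass
    value = load(path)            # in the real code: Target(obj, targetPath)
    _target_buffer[key] = value
    return value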
def getTags(self, uuid=None, filename=None):
    """
    Get tags associated with proxies.
    When no uuid and filename are specified, returns all the tags found in
    this collection (all proxy files managed by this library).
    Specify a filename or uuid to get all tags belonging to that proxy file.
    Always returns a set of tags (so contains no duplicates), unless no proxy
    was found, in which case None is returned.
    An empty library (no proxies) or a library where no proxy file contains
    tags will always return an empty set.
    """
    if self._proxyFileCache is None:
        self.loadProxyFileCache()

    result = set()

    if uuid and filename:
        raise RuntimeWarning("getTags: Specify either uuid or filename, not both!")

    if uuid:
        proxyFile = self.findProxyByUuid(uuid)
        if not proxyFile:
            log.warning('Could not get tags for proxy with UUID %s. Does not exist in %s library.', uuid, self.proxyName)
            return result
    elif filename:
        proxyId = getpath.canonicalPath(filename)
        if proxyId not in self._proxyFileCache:
            log.warning('Could not get tags for proxy with filename %s. Does not exist in %s library.', filename, self.proxyName)
            return result
        _, _, tags = self._proxyFileCache[proxyId]
        result = result.union(tags)
    else:
        for (path, values) in self._proxyFileCache.items():
            _, uuid, tags = values
            result = result.union(tags)

    return result
def getTags(self, filename=None):
    import filecache

    def _getSkeletonTags(filename):
        return skeleton.peekMetadata(filename)

    if self._skelFileCache is None:
        # Init cache
        self.loadCache()
        self._skelFileCache = filecache.updateFileCache(self.paths, 'json', _getSkeletonTags, self._skelFileCache, False)

    # TODO move most of this (duplicated) logic inside a class in filecache
    result = set()
    if filename:
        fileId = getpath.canonicalPath(filename)
        if fileId not in self._skelFileCache:
            # Lazily update cache
            self._skelFileCache = filecache.updateFileCache(self.paths + [os.path.dirname(fileId)], 'json', _getSkeletonTags, self._skelFileCache, False)
        if fileId in self._skelFileCache:
            metadata = self._skelFileCache[fileId]
            if metadata is not None:
                mtime, name, desc, tags = metadata
                if mtime < os.path.getmtime(fileId):
                    # Queried file was updated, update stale cache
                    self._skelFileCache = filecache.updateFileCache(self.paths + [os.path.dirname(fileId)], 'json', _getSkeletonTags, self._skelFileCache, False)
                    metadata = self._skelFileCache[fileId]
                    mtime, name, desc, tags = metadata
                result = result.union(tags)
        else:
            log.warning('Could not get tags for skeleton file %s. Does not exist in Skeleton library.', filename)
        return result
    else:
        for (path, values) in self._skelFileCache.items():
            _, name, desc, tags = values
            result = result.union(tags)
        return result
def updateProxyFileCache(paths, fileExts, cache=None):
    """
    Update cache of proxy files in the specified paths. If no cache is given
    as parameter, a new cache is created.
    This cache contains per canonical filename (key) the UUID and tags of
    that proxy file.
    Cache entries are invalidated if their modification time has changed, or
    they no longer exist on disk.
    """
    if cache is None:
        cache = dict()
    proxyFiles = []
    entries = dict((key, True) for key in cache.keys())   # lookup dict for old entries in cache
    for folder in paths:
        proxyFiles.extend(getpath.search(folder, fileExts, recursive=True, mutexExtensions=True))
    for proxyFile in proxyFiles:
        proxyId = getpath.canonicalPath(proxyFile)
        mtime = os.path.getmtime(proxyFile)

        if proxyId in cache:
            try:   # Guard against doubles
                del entries[proxyId]   # Mark that old cache entry is still valid
            except:
                pass

            cached_mtime = cache[proxyId][0]
            if not (mtime > cached_mtime):
                continue

        (uuid, tags) = peekMetadata(proxyFile)
        cache[proxyId] = (mtime, uuid, tags)

    # Remove entries from cache that no longer exist
    for key in entries.keys():
        try:
            del cache[key]
        except:
            pass
    return cache
def getTags(self, uuid=None, filename=None):
    """
    Get tags associated with proxies.
    When no uuid and filename are specified, returns all the tags found in
    this collection (all proxy files managed by this library).
    Specify a filename or uuid to get all tags belonging to that proxy file.
    Always returns a set of tags (so contains no duplicates), unless no proxy
    was found, in which case None is returned.
    An empty library (no proxies) or a library where no proxy file contains
    tags will always return an empty set.
    """
    if self._proxyFileCache is None:
        self.loadProxyFileCache()

    result = set()

    if uuid and filename:
        raise RuntimeWarning("getTags: Specify either uuid or filename, not both!")

    if uuid:
        proxyFile = self.findProxyByUuid(uuid)
        if not proxyFile:
            log.warning('Could not get tags for proxy with UUID %s. Does not exist in %s library.', uuid, self.proxyName)
            return result
    elif filename:
        proxyId = getpath.canonicalPath(filename)
        if proxyId not in self._proxyFileCache:
            log.warning('Could not get tags for proxy with filename %s. Does not exist in %s library.', filename, self.proxyName)
            return result
        _, _, tags = self._proxyFileCache[proxyId]
        result = result.union(tags)
    else:
        for (path, values) in self._proxyFileCache.items():
            _, uuid, tags = values
            result = result.union(tags)

    return result
import glmodule as gl
import events3d
import qtgui
import eventqueue
import time

import makehuman
import getpath
from mhversion import MHVersion

if False and makehuman.isBuild():
    # Set absolute Qt plugin path programmatically on frozen deployment to fix
    # crashes when Qt is on DLL PATH in windows.
    # No qt.conf file should be present in the application folder!
    deployment_path = getpath.canonicalPath(getpath.getSysPath())
    QtCore.QCoreApplication.addLibraryPath(os.path.join(deployment_path, 'qt4_plugins'))
    # Plugins will be loaded when QCoreApplication object is constructed. Some
    # Qt deployments are known to prepend new library paths at this time, such
    # as /usr/lib/qt4/plugins on some linux platforms, but this is not a likely
    # case on windows platforms.

# Timeout in seconds after which moving the mousewheel will pick a new mouse pos
# TODO make this configurable in settings?
MOUSEWHEEL_PICK_TIMEOUT = 0.5


class Modifiers:
    SHIFT = int(QtCore.Qt.ShiftModifier)
    CTRL = int(QtCore.Qt.ControlModifier)
def getMetadata(self, filename):
    """Retrieve a metadata entry from this cache"""
    fileId = getpath.canonicalPath(filename)
    return self[fileId]
def setDetail(self, name, value):
    name = canonicalPath(name)
    if value:
        self.targetsDetailStack[name] = value
    elif name in self.targetsDetailStack:
        del self.targetsDetailStack[name]
def getDetail(self, name):
    name = canonicalPath(name)
    return self.targetsDetailStack.get(name, 0.0)
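# --- Usage sketch (not part of the library): setDetail()/getDetail() above share one
# canonicalPath() key, so lookups are insensitive to path spelling differences such as
# redundant separators or '.' segments. The target path is hypothetical and `human`
# stands for the instance carrying these two methods.
#
#   human.setDetail('data/targets/example/some.target', 0.7)
#   human.getDetail('data/targets/./example/some.target')    # -> 0.7 (same canonical key)
#   human.setDetail('data/targets/example/some.target', 0)   # a zero value removes the entry
#   human.getDetail('data/targets/example/some.target')      # -> 0.0 (the default)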
def selectProxy(self, mhclofile):
    """
    Called when a new proxy has been selected.
    If this library selects only a single proxy, specifying None as the
    mhclofile parameter will deselect the current proxy and set the selection
    to "none".
    If this library allows selecting multiple proxies, specifying None as
    mhclofile will have no effect.
    """
    if not mhclofile:
        if self.multiProxy:
            return
        else:
            self.deselectProxy(None)
            return

    log.message('Selecting proxy file "%s" from %s library.', mhclofile, self.proxyName)
    human = self.human

    pxy = None
    mhcloId = getpath.canonicalPath(mhclofile)
    if mhcloId in self._proxyCache:
        pxy = self._proxyCache[mhcloId]
        if pxy.mtime < os.path.getmtime(mhclofile):
            pxy = None

    if not pxy:
        pxy = proxy.loadProxy(human, mhclofile, type=self.proxyName.capitalize())
        self._proxyCache[mhcloId] = pxy

    if pxy.uuid in [p.uuid for p in self.getSelection() if p is not None]:
        log.debug("Proxy with UUID %s (%s) already loaded in %s library. Skipping.", pxy.uuid, pxy.file, self.proxyName)
        return

    if not self.multiProxy and self.isProxySelected():
        # Deselect previously selected proxy
        self.deselectProxy(None, suppressSignal=True)

    mesh, obj = pxy.loadMeshAndObject(human)
    if not mesh:
        return
    mesh.setPickable(True)  # Allow mouse picking for proxies attached to human

    gui3d.app.addObject(obj)

    self.filechooser.selectItem(mhclofile)

    self.adaptProxyToHuman(pxy, obj)
    obj.setSubdivided(human.isSubdivided())  # Copy subdivided state of human

    # Add to selection
    self.selectedProxies.append(pxy)
    self.filechooser.selectItem(mhclofile)

    self.proxySelected(pxy)

    self.signalChange()
from core import G
import glmodule as gl
import events3d
import qtgui
import queue
import time
import getpath
import makehuman

if False and makehuman.isBuild():
    # Set absolute Qt plugin path programmatically on frozen deployment to fix
    # crashes when Qt is on DLL PATH in windows.
    # No qt.conf file should be present in the application folder!
    deployment_path = getpath.canonicalPath(getpath.getSysPath())
    QtCore.QCoreApplication.addLibraryPath(os.path.join(deployment_path, 'qt4_plugins'))
    # Plugins will be loaded when QCoreApplication object is constructed. Some
    # Qt deployments are known to prepend new library paths at this time, such
    # as /usr/lib/qt4/plugins on some linux platforms, but this is not a likely
    # case on windows platforms.

# Timeout in seconds after which moving the mousewheel will pick a new mouse pos
# TODO make this configurable in settings?
MOUSEWHEEL_PICK_TIMEOUT = 0.5


class Modifiers:
    SHIFT = int(QtCore.Qt.ShiftModifier)
    CTRL = int(QtCore.Qt.ControlModifier)
    ALT = int(QtCore.Qt.AltModifier)
    META = int(QtCore.Qt.MetaModifier)
def update(self, paths, fileExts, getMetadata, removeOldEntries=True):
    """
    Update this cache of files in the specified paths.
    This cache contains per canonical filename (key) metadata of that file.
    The contents of this metadata, and how it is parsed from file, is
    completely customizable.
    Cache entries are invalidated if their modification time has changed, or
    they no longer exist on disk.
    Requires passing a method getMetadata(filename) that retrieves metadata
    to be stored in the cache from the specified file, and that should return
    a tuple.
    """
    def _getOverridingFile(filepath, fileExts, mtime=None):
        """
        Overriding happens if a file with lesser precedence has a more recent
        modification time. fileExts are expected to be passed in reverse order.
        """
        if mtime is None:
            mtime = os.path.getmtime(self.getMetadataFile(filepath))
        fileExt = os.path.splitext(filepath)[1][1:].lower()
        i = fileExts.index(fileExt)
        if i != 0:
            for altExt in fileExts[:i]:
                overridepath = os.path.splitext(filepath)[0] + "." + altExt
                if os.path.isfile(overridepath):
                    mtime_ = os.path.getmtime(self.getMetadataFile(overridepath))
                    if mtime_ > mtime:
                        return (overridepath, mtime_)
        return None

    if not isinstance(paths, list):
        paths = [paths]
    if not isinstance(fileExts, list):
        fileExts = [fileExts]
    fileExts = [f[1:].lower() if f.startswith('.') else f.lower() for f in fileExts]

    files = []
    oldEntries = dict((key, True) for key in self._cache.keys())   # lookup dict for old entries in cache
    for folder in paths:
        files.extend(getpath.search(folder, fileExts, recursive=True, mutexExtensions=True))
    for filepath in files:
        fileId = getpath.canonicalPath(filepath)
        mtime = os.path.getmtime(self.getMetadataFile(filepath))

        overridepath = _getOverridingFile(filepath, list(reversed(fileExts)), mtime)
        if overridepath is not None:
            filepath, mtime = overridepath

        if fileId in self._cache:
            try:   # Guard against doubles
                del oldEntries[fileId]   # Mark that old cache entry is still valid
            except:
                pass

            cached_mtime = self[fileId][0]
            if not (mtime > cached_mtime):
                continue

        self._cache[fileId] = (mtime,) + getMetadata(filepath)

    if removeOldEntries:
        # Remove entries from cache that no longer exist
        for key in oldEntries.keys():
            try:
                del self._cache[key]
            except:
                pass
def updateFileCache(paths, fileExts, getMetadata, cache=None, removeOldEntries=True):
    """
    Update cache of files in the specified paths. If no cache is given as
    parameter, a new cache is created.
    This cache contains per canonical filename (key) metadata of that file.
    The contents of this metadata, and how it is parsed from file, is
    completely customizable.
    Cache entries are invalidated if their modification time has changed, or
    they no longer exist on disk.
    Requires passing a method getMetadata(filename) that retrieves metadata
    to be stored in the cache from the specified file, and that should return
    a tuple.
    """
    def _getOverridingFile(filepath, fileExts, mtime=None):
        """
        Overriding happens if a file with lesser precedence has a more recent
        modification time. fileExts are expected to be passed in reverse order.
        """
        if mtime is None:
            mtime = os.path.getmtime(filepath)
        fileExt = os.path.splitext(filepath)[1][1:].lower()
        i = fileExts.index(fileExt)
        if i != 0:
            for altExt in fileExts[:i]:
                overridepath = os.path.splitext(filepath)[0] + "." + altExt
                if os.path.isfile(overridepath):
                    mtime_ = os.path.getmtime(overridepath)
                    if mtime_ > mtime:
                        return (overridepath, mtime_)
        return None

    if cache is None:
        cache = dict()
    if not isinstance(paths, list):
        paths = [paths]
    if not isinstance(fileExts, list):
        fileExts = [fileExts]
    fileExts = [f[1:].lower() if f.startswith(".") else f.lower() for f in fileExts]

    files = []
    oldEntries = dict((key, True) for key in cache.keys())   # lookup dict for old entries in cache
    for folder in paths:
        files.extend(getpath.search(folder, fileExts, recursive=True, mutexExtensions=True))
    for filepath in files:
        fileId = getpath.canonicalPath(filepath)
        mtime = os.path.getmtime(filepath)

        overridepath = _getOverridingFile(filepath, list(reversed(fileExts)), mtime)
        if overridepath is not None:
            filepath, mtime = overridepath

        if fileId in cache:
            try:   # Guard against doubles
                del oldEntries[fileId]   # Mark that old cache entry is still valid
            except:
                pass

            cached_mtime = cache[fileId][0]
            if not (mtime > cached_mtime):
                continue

        cache[fileId] = (mtime,) + getMetadata(filepath)

    if removeOldEntries:
        # Remove entries from cache that no longer exist
        for key in oldEntries.keys():
            try:
                del cache[key]
            except:
                pass
    return cache
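# --- Usage sketch (not part of the library): how a caller might drive the
# updateFileCache() function defined above. The search path, file extension and the
# metadata reader below are illustrative only, not the library's actual defaults.
import os

def _peek_name(filepath):
    # Illustrative metadata reader; updateFileCache() expects it to return a tuple.
    return (os.path.splitext(os.path.basename(filepath))[0],)

cache = updateFileCache(['data/clothes'], 'mhclo', _peek_name)          # first run: builds the cache
cache = updateFileCache(['data/clothes'], 'mhclo', _peek_name, cache)   # later runs: only re-reads changed files
for file_id, (mtime, name) in cache.items():
    print(file_id, name)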
def selectProxy(self, mhclofile):
    """
    Called when a new proxy has been selected.
    If this library selects only a single proxy, specifying None as the
    mhclofile parameter will deselect the current proxy and set the selection
    to "none".
    If this library allows selecting multiple proxies, specifying None as
    mhclofile will have no effect.
    """
    if not mhclofile:
        if self.multiProxy:
            return
        else:
            self.deselectProxy(None)
            return

    log.message('Selecting proxy file "%s" from %s library.', mhclofile, self.proxyName)
    human = self.human

    proxy = None
    mhcloId = getpath.canonicalPath(mhclofile)
    if mhcloId in self._proxyCache:
        proxy = self._proxyCache[mhcloId]
        if proxy.mtime < os.path.getmtime(mhclofile):
            proxy = None

    if not proxy:
        proxy = mh2proxy.readProxyFile(human.meshData, mhclofile, type=self.proxyName.capitalize())
        self._proxyCache[mhcloId] = proxy

    if proxy.uuid in [p.uuid for p in self.getSelection()]:
        log.debug("Proxy with UUID %s (%s) already loaded in %s library. Skipping.", proxy.uuid, proxy.file, self.proxyName)
        return

    if not self.multiProxy and self.isProxySelected():
        # Deselect previously selected proxy
        self.deselectProxy(None, suppressSignal=True)

    mesh = files3d.loadMesh(proxy.obj_file, maxFaces=proxy.max_pole)
    if not mesh:
        log.error("Failed to load %s", proxy.obj_file)
        return

    self.filechooser.selectItem(mhclofile)

    mesh.material = proxy.material
    mesh.priority = proxy.z_depth           # Set render order
    mesh.setCameraProjection(0)             # Set to model camera
    mesh.setSolid(human.mesh.solid)         # Set to wireframe if human is in wireframe

    obj = gui3d.Object(mesh, self.human.getPosition())
    obj.setRotation(human.getRotation())
    gui3d.app.addObject(obj)

    self.adaptProxyToHuman(proxy, obj)
    obj.setSubdivided(human.isSubdivided())  # Copy subdivided state of human

    # Add to selection
    self.selectedProxies.append(proxy)
    self.proxyObjects.append(obj)
    self.filechooser.selectItem(mhclofile)

    self.proxySelected(proxy, obj)

    self.signalChange()
def fromFile(self, filename):
    """
    Parse a .mhmat file and set its contents as the properties of this material.
    """
    log.debug("Loading material from file %s", filename)
    try:
        f = open(filename, "rU")
    except:
        f = None
    if f is None:
        log.error("Failed to load material from file %s.", filename)
        return

    self.filename = os.path.normpath(filename)
    self.filepath = os.path.dirname(self.filename)

    shaderConfig_diffuse = None
    shaderConfig_bump = None
    shaderConfig_normal = None
    shaderConfig_displacement = None
    shaderConfig_spec = None
    shaderConfig_vertexColors = None
    shaderConfig_transparency = None
    shaderConfig_ambientOcclusion = None

    for line in f:
        words = line.split()
        if len(words) == 0:
            continue
        if words[0] in ["#", "//"]:
            continue

        if words[0] == "name":
            self.name = words[1]
        elif words[0] == "ambientColor":
            self._ambientColor.copyFrom([float(w) for w in words[1:4]])
        elif words[0] == "diffuseColor":
            self._diffuseColor.copyFrom([float(w) for w in words[1:4]])
        elif words[0] == "diffuseIntensity":
            log.warning('Deprecated parameter "diffuseIntensity" specified in material %s', self.name)
        elif words[0] == "specularColor":
            self._specularColor.copyFrom([float(w) for w in words[1:4]])
        elif words[0] == "specularIntensity":
            log.warning('Deprecated parameter "specularIntensity" specified in material %s', self.name)
        elif words[0] == "shininess":
            self._shininess = max(0.0, min(1.0, float(words[1])))
        elif words[0] == "emissiveColor":
            self._emissiveColor.copyFrom([float(w) for w in words[1:4]])
        elif words[0] == "opacity":
            self._opacity = max(0.0, min(1.0, float(words[1])))
        elif words[0] == "translucency":
            self._translucency = max(0.0, min(1.0, float(words[1])))
        elif words[0] == "shadeless":
            self._shadeless = words[1].lower() in ["yes", "enabled", "true"]
        elif words[0] == "wireframe":
            self._wireframe = words[1].lower() in ["yes", "enabled", "true"]
        elif words[0] == "transparent":
            self._transparent = words[1].lower() in ["yes", "enabled", "true"]
        elif words[0] == "alphaToCoverage":
            self._alphaToCoverage = words[1].lower() in ["yes", "enabled", "true"]
        elif words[0] == "backfaceCull":
            self._backfaceCull = words[1].lower() in ["yes", "enabled", "true"]
        elif words[0] == "depthless":
            self._depthless = words[1].lower() in ["yes", "enabled", "true"]
        elif words[0] == "castShadows":
            self._castShadows = words[1].lower() in ["yes", "enabled", "true"]
        elif words[0] == "receiveShadows":
            self._receiveShadows = words[1].lower() in ["yes", "enabled", "true"]
        elif words[0] == "autoBlendSkin":
            self._autoBlendSkin = words[1].lower() in ["yes", "enabled", "true"]
        elif words[0] == "diffuseTexture":
            self._diffuseTexture = getFilePath(words[1], self.filepath)
        elif words[0] == "bumpmapTexture":
            self._bumpMapTexture = getFilePath(words[1], self.filepath)
        elif words[0] == "bumpmapIntensity":
            self._bumpMapIntensity = max(0.0, min(1.0, float(words[1])))
        elif words[0] == "normalmapTexture":
            self._normalMapTexture = getFilePath(words[1], self.filepath)
        elif words[0] == "normalmapIntensity":
            self._normalMapIntensity = max(0.0, min(1.0, float(words[1])))
        elif words[0] == "displacementmapTexture":
            self._displacementMapTexture = getFilePath(words[1], self.filepath)
        elif words[0] == "displacementmapIntensity":
            self._displacementMapIntensity = max(0.0, min(1.0, float(words[1])))
        elif words[0] == "specularmapTexture":
            self._specularMapTexture = getFilePath(words[1], self.filepath)
        elif words[0] == "specularmapIntensity":
            self._specularMapIntensity = max(0.0, min(1.0, float(words[1])))
        elif words[0] == "transparencymapTexture":
            self._transparencyMapTexture = getFilePath(words[1], self.filepath)
        elif words[0] == "transparencymapIntensity":
            self._transparencyMapIntensity = max(0.0, min(1.0, float(words[1])))
        elif words[0] == "aomapTexture":
            self._aoMapTexture = getFilePath(words[1], self.filepath)
        elif words[0] == "aomapIntensity":
            self._aoMapIntensity = max(0.0, min(1.0, float(words[1])))
        elif words[0] == "sssEnabled":
            self._sssEnabled = words[1].lower() in ["yes", "enabled", "true"]
        elif words[0] == "sssRScale":
            self._sssRScale = max(0.0, float(words[1]))
        elif words[0] == "sssGScale":
            self._sssGScale = max(0.0, float(words[1]))
        elif words[0] == "sssBScale":
            self._sssBScale = max(0.0, float(words[1]))
        elif words[0] == "shader":
            self._shader = getShaderPath(words[1], self.filepath)
        elif words[0] == "uvMap":
            self._uvMap = getFilePath(words[1], self.filepath)
            from getpath import getSysDataPath, canonicalPath
            if self._uvMap and \
               canonicalPath(self._uvMap) == canonicalPath(getSysDataPath('uvs/default.mhuv')):
                # uvs/default.mhuv is a meta-file that refers to the default uv set
                self._uvMap = None
        elif words[0] == "shaderParam":
            if len(words) > 3:
                self.setShaderParameter(words[1], words[2:])
            else:
                self.setShaderParameter(words[1], words[2])
        elif words[0] == "shaderDefine":
            self.addShaderDefine(words[1])
        elif words[0] == "shaderConfig":
            if words[1] == "diffuse":
                shaderConfig_diffuse = words[2].lower() in ["yes", "enabled", "true"]
            elif words[1] == "bump":
                shaderConfig_bump = words[2].lower() in ["yes", "enabled", "true"]
            elif words[1] == "normal":
                shaderConfig_normal = words[2].lower() in ["yes", "enabled", "true"]
            elif words[1] == "displacement":
                shaderConfig_displacement = words[2].lower() in ["yes", "enabled", "true"]
            elif words[1] == "spec":
                shaderConfig_spec = words[2].lower() in ["yes", "enabled", "true"]
            elif words[1] == "vertexColors":
                shaderConfig_vertexColors = words[2].lower() in ["yes", "enabled", "true"]
            elif words[1] == "transparency":
                shaderConfig_transparency = words[2].lower() in ["yes", "enabled", "true"]
            elif words[1] == "ambientOcclusion":
                shaderConfig_ambientOcclusion = words[2].lower() in ["yes", "enabled", "true"]
            else:
                log.warning('Unknown material shaderConfig property: %s', words[1])

    f.close()

    self.configureShading(diffuse=shaderConfig_diffuse,
                          bump=shaderConfig_bump,
                          normal=shaderConfig_normal,
                          displacement=shaderConfig_displacement,
                          spec=shaderConfig_spec,
                          vertexColors=shaderConfig_vertexColors,
                          transparency=shaderConfig_transparency,
                          ambientOcclusion=shaderConfig_ambientOcclusion)