# Assumes module-level imports of os and sys, and a module-level 'verbose' flag.
def __init__(this, path):
    this.path = path + '/makehuman'
    this.syspath = ['.' + x for x in ('/', '/lib', '/apps', '/shared', '/apps/gui', '/core', '/plugins')]
    #cwd = os.getcwd()
    #syspath = sys.path
    os.chdir(this.path)
    sys.path += this.syspath
    if verbose:
        sys.stderr.write("Probing makehuman ...\n")
    import core
    import headless
    import getpath
    import humanmodifier
    import log
    ## uncomment to disable makehuman log
    #log.init()
    #core.G.app = headless.ConsoleApp()
    #this.human = core.G.app.selectedHuman
    modifierGroups = ('head', 'forehead', 'eyebrows', 'neck', 'nose', 'mouth', 'ears', 'chin', 'cheek',
                      'macrodetails', 'macrodetails-universal', 'macrodetails-proportions')
    proxyTypes = ('hair', 'eyebrows', 'eyelashes')
    modifiers = humanmodifier.loadModifiers(getpath.getSysDataPath('modifiers/modeling_modifiers.json'), None)
    modifiers = [x for x in modifiers if x.groupName in modifierGroups and x.fullName != 'macrodetails/Caucasian']
    this.symmetricalModifiers = [x for x in modifiers if x.getSymmetrySide() is None]
    this.rightModifiers = [x for x in modifiers if x.getSymmetrySide() == 'r']
    this.leftModifiers = [x for x in modifiers if x.getSymmetrySide() == 'l']
    if verbose:
        sys.stderr.write("Found %i symmetrical facial features\n" % len(this.symmetricalModifiers))
        sys.stderr.write("Found %i left facial features\n" % len(this.leftModifiers))
        sys.stderr.write("Found %i right facial features\n" % len(this.rightModifiers))
    this.proxies = {}
    for proxyType in proxyTypes:
        files = getpath.search([getpath.getDataPath(proxyType), getpath.getSysDataPath(proxyType)],
                               ['.proxy', '.mhclo'], True)
        files = list(files)
        if verbose:
            sys.stderr.write("Found %i %s proxies\n" % (len(files), proxyType))
        this.proxies[proxyType] = files
    skins = getpath.search([getpath.getDataPath('skins'), getpath.getSysDataPath('skins')], ['.mhmat'], True)
    this.skins = list(skins)
    if verbose:
        sys.stderr.write("Found %i skins\n" % len(this.skins))
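# Usage sketch for the probe above (hedged: 'MakeHumanProbe' is a hypothetical
# name for the enclosing class, and '/opt/makehuman' is an assumed location of
# a checkout containing a 'makehuman' subdirectory).
probe = MakeHumanProbe('/opt/makehuman')
print('%i symmetrical modifiers' % len(probe.symmetricalModifiers))
print('%i hair proxies' % len(probe.proxies['hair']))
print('%i skins' % len(probe.skins))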
def _listDataFiles(foldername, extensions, onlySysData=False, recursive=True):
    import getpath
    if onlySysData:
        paths = [getpath.getSysDataPath(foldername)]
    else:
        paths = [getpath.getDataPath(foldername), getpath.getSysDataPath(foldername)]
    return getpath.search(paths, extensions, recursive)
def _listDataFiles(foldername, extensions, onlySysData=False, recursive=True):
    with mhpath:  # sadly makehuman seems hardcoded
        if onlySysData:
            paths = [getpath.getSysDataPath(foldername)]
        else:
            paths = [getpath.getDataPath(foldername), getpath.getSysDataPath(foldername)]
        return list(getpath.search(paths, extensions, recursive))
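# Usage sketch for the _listDataFiles variants above (hedged: assumes a working
# MakeHuman environment so getpath can resolve its data folders; the second
# variant also needs an 'mhpath' context manager in scope).
hairFiles = list(_listDataFiles('hair', ['.mhclo']))                    # user + system data
sysSkins = list(_listDataFiles('skins', ['.mhmat'], onlySysData=True))  # system data only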
def load(app):
    fileList = gp.search(gp.getPath('models'), extensions='.mhm', recursive=True)
    for filepath in fileList:
        isChanged = False
        hasFailed = False
        if os.path.isfile(filepath):
            with open(filepath, 'r') as f:
                lines = f.readlines()
            for idx, line in enumerate(lines):
                if line.startswith('tags'):
                    if ',,' in line:
                        data = line.split()
                        lines[idx] = data[0] + ' ' + ';'.join([d.replace(',,', ' ') for d in data[1:]]) + '\n'
                        isChanged = True
                        log.message('Replacing %s: "%s" by "%s"' % (filepath, line.strip(), lines[idx].strip()))
                    break
            if isChanged:
                backup_filepath = filepath + '~'
                if not os.path.isfile(backup_filepath):
                    try:
                        shutil.copy2(filepath, backup_filepath)
                    except OSError:
                        log.warning('Failed to backup %s. Changes will not be stored' % filepath)
                        hasFailed = True
                else:
                    log.warning('Backup file %s already exists.' % backup_filepath)
                    hasFailed = True
                # Check hasFailed first: if the backup copy failed, the backup
                # file may not exist and filecmp.cmp() would raise.
                if not hasFailed and filecmp.cmp(filepath, backup_filepath, shallow=False):
                    log.message('Created backup file: %s' % backup_filepath)
                    try:
                        with open(filepath, 'w') as f:
                            f.writelines(lines)
                        log.message('Successfully converted: %s' % filepath)
                    except OSError:
                        log.warning('Cannot write changes to %s' % filepath)
                        hasFailed = True
                else:
                    log.warning('Failed to backup %s. Changes will not be stored' % filepath)
                    hasFailed = True
                if hasFailed:
                    log.warning('Failed to convert %s. The tags line should be fixed in a text editor.' % filepath)
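# Worked example of the tags rewrite above: a double comma inside a tag becomes
# a space, and the whitespace-separated tags are re-joined with ';'.
line = 'tags long,,hair female\n'
data = line.split()
converted = data[0] + ' ' + ';'.join([d.replace(',,', ' ') for d in data[1:]]) + '\n'
assert converted == 'tags long hair;female\n'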
def updateProxyFileCache(paths, fileExts, cache=None):
    """
    Update cache of proxy files in the specified paths. If no cache is given as
    parameter, a new cache is created. This cache contains, per canonical
    filename (key), the UUID and tags of that proxy file. Cache entries are
    invalidated if their modification time has changed or if they no longer
    exist on disk.
    """
    if cache is None:
        cache = dict()
    proxyFiles = []
    entries = dict((key, True) for key in cache.keys())  # lookup dict for old entries in cache
    for folder in paths:
        proxyFiles.extend(getpath.search(folder, fileExts, recursive=True, mutexExtensions=True))
    for proxyFile in proxyFiles:
        proxyId = getpath.canonicalPath(proxyFile)
        mtime = os.path.getmtime(proxyFile)
        if proxyId in cache:
            try:  # Guard against doubles
                del entries[proxyId]  # Mark that old cache entry is still valid
            except KeyError:
                pass
            cached_mtime = cache[proxyId][0]
            if not (mtime > cached_mtime):
                continue
        (uuid, tags) = peekMetadata(proxyFile)
        cache[proxyId] = (mtime, uuid, tags)
    # Remove entries from cache that no longer exist
    for key in entries.keys():
        try:
            del cache[key]
        except KeyError:
            pass
    return cache
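# Usage sketch for updateProxyFileCache (hedged: the data path is illustrative,
# and getpath/peekMetadata must come from the surrounding MakeHuman module).
cache = updateProxyFileCache([getpath.getSysDataPath('hair')], ['proxy', 'mhclo'])
cache = updateProxyFileCache([getpath.getSysDataPath('hair')], ['proxy', 'mhclo'], cache)  # incremental refresh
for path, (mtime, uuid, tags) in cache.items():
    print(path, uuid, tags)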
def update(self, paths, fileExts, getMetadata, removeOldEntries=True):
    """
    Update this cache of files in the specified paths. This cache contains, per
    canonical filename (key), metadata of that file. The contents of this
    metadata, and how it is parsed from file, are completely customizable.
    Cache entries are invalidated if their modification time has changed or if
    they no longer exist on disk.
    Requires a callable getMetadata(filename) that retrieves the metadata to be
    stored in the cache from the specified file and returns it as a tuple.
    """
    def _getOverridingFile(filepath, fileExts, mtime=None):
        """
        Overriding happens if a file with lesser precedence has a more recent
        modification time. fileExts are expected to be passed in reverse order.
        """
        if mtime is None:
            mtime = os.path.getmtime(self.getMetadataFile(filepath))
        fileExt = os.path.splitext(filepath)[1][1:].lower()
        i = fileExts.index(fileExt)
        if i != 0:
            for altExt in fileExts[:i]:
                overridepath = os.path.splitext(filepath)[0] + "." + altExt
                if os.path.isfile(overridepath):
                    mtime_ = os.path.getmtime(self.getMetadataFile(overridepath))
                    if mtime_ > mtime:
                        return (overridepath, mtime_)
        return None

    if not isinstance(paths, list):
        paths = [paths]
    if not isinstance(fileExts, list):
        fileExts = [fileExts]
    fileExts = [f[1:].lower() if f.startswith('.') else f.lower() for f in fileExts]
    files = []
    oldEntries = dict((key, True) for key in self._cache.keys())  # lookup dict for old entries in cache
    for folder in paths:
        files.extend(getpath.search(folder, fileExts, recursive=True, mutexExtensions=True))
    for filepath in files:
        fileId = getpath.canonicalPath(filepath)
        mtime = os.path.getmtime(self.getMetadataFile(filepath))
        overridepath = _getOverridingFile(filepath, list(reversed(fileExts)), mtime)
        if overridepath is not None:
            filepath, mtime = overridepath
        if fileId in self._cache:
            try:  # Guard against doubles
                del oldEntries[fileId]  # Mark that old cache entry is still valid
            except KeyError:
                pass
            cached_mtime = self[fileId][0]
            if not (mtime > cached_mtime):
                continue
        self._cache[fileId] = (mtime,) + getMetadata(filepath)
    if removeOldEntries:
        # Remove entries from cache that no longer exist
        for key in oldEntries.keys():
            try:
                del self._cache[key]
            except KeyError:
                pass
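# Usage sketch for the update() method above (hedged: 'MetadataCache' is a
# hypothetical name for the enclosing class, assumed to store entries in
# self._cache and to expose getMetadataFile() and __getitem__).
def getTags(filename):
    return (os.path.basename(filename),)  # illustrative metadata; must be a tuple

cache = MetadataCache()
cache.update(getpath.getSysDataPath('hair'), ['.proxy', '.mhclo'], getTags)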
def search(self):
    return getpath.search(self.paths, self.extensions,
                          recursive=not self.doNotRecurse,
                          mutexExtensions=self.mutexExtensions)
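# Usage sketch for the search() method above (hedged: 'FileSearch' is a
# hypothetical stand-in for the enclosing class, which is assumed to carry
# paths/extensions/doNotRecurse/mutexExtensions attributes).
query = FileSearch()
query.paths = [getpath.getSysDataPath('skins')]
query.extensions = ['mhmat']
query.doNotRecurse = False
query.mutexExtensions = False
for match in query.search():
    print(match)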
def updateFileCache(paths, fileExts, getMetadata, cache=None, removeOldEntries=True):
    """
    Update cache of files in the specified paths. If no cache is given as
    parameter, a new cache is created. This cache contains, per canonical
    filename (key), metadata of that file. The contents of this metadata, and
    how it is parsed from file, are completely customizable.
    Cache entries are invalidated if their modification time has changed or if
    they no longer exist on disk.
    Requires a callable getMetadata(filename) that retrieves the metadata to be
    stored in the cache from the specified file and returns it as a tuple.
    """
    def _getOverridingFile(filepath, fileExts, mtime=None):
        """
        Overriding happens if a file with lesser precedence has a more recent
        modification time. fileExts are expected to be passed in reverse order.
        """
        if mtime is None:
            mtime = os.path.getmtime(filepath)
        fileExt = os.path.splitext(filepath)[1][1:].lower()
        i = fileExts.index(fileExt)
        if i != 0:
            for altExt in fileExts[:i]:
                overridepath = os.path.splitext(filepath)[0] + "." + altExt
                if os.path.isfile(overridepath):
                    mtime_ = os.path.getmtime(overridepath)
                    if mtime_ > mtime:
                        return (overridepath, mtime_)
        return None

    if cache is None:
        cache = dict()
    if not isinstance(paths, list):
        paths = [paths]
    if not isinstance(fileExts, list):
        fileExts = [fileExts]
    fileExts = [f[1:].lower() if f.startswith(".") else f.lower() for f in fileExts]
    files = []
    oldEntries = dict((key, True) for key in cache.keys())  # lookup dict for old entries in cache
    for folder in paths:
        files.extend(getpath.search(folder, fileExts, recursive=True, mutexExtensions=True))
    for filepath in files:
        fileId = getpath.canonicalPath(filepath)
        mtime = os.path.getmtime(filepath)
        overridepath = _getOverridingFile(filepath, list(reversed(fileExts)), mtime)
        if overridepath is not None:
            filepath, mtime = overridepath
        if fileId in cache:
            try:  # Guard against doubles
                del oldEntries[fileId]  # Mark that old cache entry is still valid
            except KeyError:
                pass
            cached_mtime = cache[fileId][0]
            if not (mtime > cached_mtime):
                continue
        cache[fileId] = (mtime,) + getMetadata(filepath)
    if removeOldEntries:
        # Remove entries from cache that no longer exist
        for key in oldEntries.keys():
            try:
                del cache[key]
            except KeyError:
                pass
    return cache
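# Usage sketch for updateFileCache (hedged: the folder and metadata reader are
# illustrative). Extensions earlier in fileExts take precedence, but a newer
# file with a lesser-precedence extension overrides it via _getOverridingFile.
def readMeta(filename):
    return (os.path.basename(filename),)  # illustrative; must return a tuple

cache = updateFileCache(getpath.getDataPath('clothes'), ['mhclo', 'obj'], readMeta)
cache = updateFileCache(getpath.getDataPath('clothes'), ['mhclo', 'obj'], readMeta, cache)  # cheap re-scan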