def saveRegistry(self):
	JSONHelper.trySaveJSON(
		self.portalRegistry,
		self.getProject().getGameConfigPath(_GII_PORTAL_DATA_NAME),
		'portal data')
	reg = _MOCK.getScenePortalRegistry()
	reg.markDirty(reg)
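# The persisted 'portal data' JSON is a dict keyed by portal fullname; each value
# mirrors the newEntry record built in processScenePortal below. Illustrative
# shape only (the scene path and portal names here are made up):
#
#   {
#       "area1/door_A": {
#           "fullname" : "area1/door_A",
#           "name"     : "door_A",
#           "scene"    : "scene/area1",
#           "data"     : {},            raw portal entry taken from the scene file
#           "priority" : 0
#       }
#   }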
def importAsset(self, node, reload=False):
	if node.isVirtual(): return
	#JOB: convert xls into json
	workbook = xlrd.open_workbook(node.getAbsFilePath())
	if not workbook:
		logging.warn('excel version not supported for %s' % node.getFilePath())
		return False
	data, metadata = convertWorkbookToData(workbook)
	if not data:
		logging.warn('no data converted from xls: %s' % node.getFilePath())
		return False
	cachePath = node.getCacheFile('data')
	if not JSONHelper.trySaveJSON(data, cachePath):
		logging.warn('failed saving xls data: %s' % cachePath)
		return False
	metaCachePath = node.getCacheFile('meta_data')
	if not JSONHelper.trySaveJSON(metadata, metaCachePath):
		logging.warn('failed saving xls metadata: %s' % metaCachePath)
		return False
	node.assetType = 'data_xls'
	node.setObjectFile('data', cachePath)
	node.setObjectFile('meta_data', metaCachePath)
	node.groupType = 'package'
	for id, sheet in list(data.items()):
		if isinstance(id, str):
			node.affirmChildNode(id, 'data_sheet', manager=self)
	return True
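# convertWorkbookToData is not shown in this excerpt. Judging from the loop above,
# it is assumed to return (data, metadata) where data maps sheet names to their
# converted rows; every string-keyed sheet becomes a 'data_sheet' child node.
# Illustrative shape only:
#
#   data     = { 'items': [ ...rows... ], 'npc': [ ...rows... ] }
#   metadata = { ...per-sheet meta information... }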
def processScenePortal(self, sceneNode):
	if sceneNode.getType() != 'scene': return
	self.clearScenePortal(sceneNode.getPath())
	if sceneNode.hasTag('deprecated'): return
	registry = self.portalRegistry
	sceneFilePath = sceneNode.getFilePath()
	sceneData = None #avoid NameError when the path is neither a file nor a folder
	if os.path.isfile(sceneFilePath):
		sceneData = JSONHelper.tryLoadJSON(sceneFilePath)
	elif os.path.isdir(sceneFilePath):
		sceneData = JSONHelper.tryLoadJSON(sceneFilePath + '/scene_index.json')
	configData = sceneData and sceneData.get('config', None)
	if not configData: return
	managerData = configData.get('managers', None)
	if not managerData: return
	scenePortalData = managerData.get('ScenePortalManager', None)
	if not scenePortalData: return
	scenePath = sceneNode.getPath()
	prior = sceneNode.getInheritedMetaData('scene_portal_priority', 0)
	for portalData in scenePortalData.get('portals', []):
		name = portalData['name']
		fullname = portalData['fullname']
		prevEntry = registry.get(fullname, None)
		if prevEntry:
			prevScene = prevEntry['scene']
			if prevScene != scenePath:
				prior0 = prevEntry['priority']
				if prior < prior0:
					logging.info(
						'ignore duplicated portal (low priority): %s @ %s' % (fullname, scenePath))
					continue
				elif prior == prior0:
					#TODO: warn about duplicated portal
					logging.warning(
						'duplicated portal ID: %s, found in %s and %s' % (fullname, scenePath, prevScene))
					continue
		newEntry = {
			'fullname': fullname,
			'name': name,
			'scene': scenePath,
			'data': portalData,
			'priority': prior
		}
		registry[fullname] = newEntry
		logging.info('add portal: %s @ %s' % (fullname, scenePath))
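# processScenePortal expects the scene JSON (or scene_index.json for folder
# scenes) to nest the portal list as shown below; the field names come straight
# from the lookups above, the values are illustrative:
#
#   {
#       "config": {
#           "managers": {
#               "ScenePortalManager": {
#                   "portals": [
#                       { "name": "door_A", "fullname": "area1/door_A" }
#                   ]
#               }
#           }
#       }
#   }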
def loadRegistry(self):
	self.portalRegistry = JSONHelper.tryLoadJSON(
		self.getProject().getGameConfigPath(_GII_PORTAL_DATA_NAME),
		'portal data')
	if not self.portalRegistry:
		self.portalRegistry = {}
	self.scanPortals()
def importAsset(self, node, reload=False):
	data = {}
	path = node.getFilePath()
	if path.endswith('.dict.csv'):
		try:
			data = loadCSVAsDict(node.getAbsFilePath())
		except Exception as e:
			logging.warn('Failed importing dict csv:' + node.getPath())
			return False
	elif path.endswith('.list.csv'):
		try:
			data = loadCSVAsList(node.getAbsFilePath())
		except Exception as e:
			logging.warn('Failed importing list csv:' + node.getPath())
			return False
	else:
		try:
			data = loadPlainCSV(node.getAbsFilePath())
		except Exception as e:
			logging.warn('Failed importing plain csv:' + node.getPath())
			return False
	cachePath = node.getCacheFile('data')
	if not JSONHelper.trySaveJSON(data, cachePath):
		logging.warn('failed saving csv data to json: %s' % cachePath)
		return False
	node.assetType = 'data_csv'
	node.setObjectFile('data', cachePath)
	return True
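#NOTE: loadCSVAsDict and loadCSVAsList appear further below; loadPlainCSV itself
#is not shown in this excerpt. Judging from its use ( data[0] as the header row,
#row[0]/row[1] as key/value ), it is assumed to return the file as a list of row
#lists. A minimal sketch with the standard csv module, under a hypothetical name
#so it does not shadow the real helper:
import csv

def _loadPlainCSVSketch( path ):
	#read every row of the CSV file into a plain list of string lists
	with open( path, 'r', encoding = 'utf-8', newline = '' ) as fp:
		return [ list( row ) for row in csv.reader( fp ) ]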
def importAsset( self, node, reload = False ):
	if node.isVirtual(): return
	node.assetType = 'msprite'
	node.groupType = None
	node.setBundle()
	#atlas
	atlasFile = node.getCacheFile( 'atlas' )
	# node.setObjectFile( 'atlas', atlasFile )
	#define
	defFile = node.getCacheFile( 'def' )
	node.setObjectFile( 'def', defFile )
	packedDefFile = node.getCacheFile( 'packed_def' )
	node.setObjectFile( 'packed_def', packedDefFile )
	proj = MSpriteProject()
	#traverse path
	filePath = node.getFilePath()
	nodePath = node.getNodePath()
	proj.loadFolder( node.getAbsFilePath() )
	#TODO: let texture library handle atlas
	absAtlas = AssetLibrary.get().getAbsProjectPath( atlasFile )
	absDef = AssetLibrary.get().getAbsProjectPath( defFile )
	absPackDef = AssetLibrary.get().getAbsProjectPath( packedDefFile )
	data = proj.save( absAtlas, absDef )
	JSONHelper.saveMsgPack( data, absPackDef )
	atlasNode = node.affirmChildNode( node.getBaseName() + '_texture', 'texture', manager = 'asset_manager.texture' )
	atlasNode.setWorkspaceData( 'source', atlasFile )
	app.getModule( 'texture_library' ).scheduleImport( atlasNode )
	if node.getMetaData( 'build_sub_deck', False ):
		if data:
			for animIdx, animData in data[ 'anims' ].items():
				print(( 'sub node', animData[ 'name' ] ))
				deprecated = animData.get( 'deprecated', False )
				subNode = node.affirmChildNode( animData[ 'name' ], 'deck2d.msprite_seq', manager = self )
				subNode.setInternalDeprecated( deprecated )
	return True
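# proj.save() is assumed to return the packed sprite data; the sub-deck loop above
# only relies on data['anims'] being a mapping from animation index to a record
# with a 'name' and an optional 'deprecated' flag, roughly:
#
#   data['anims'] = { 1: { 'name': 'walk', 'deprecated': False }, ... }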
def importAsset(self, node, reload=False):
	if node.isVirtual(): return
	node.assetType = 'named_tileset_pack'
	#atlas
	atlasFile = node.getCacheFile('atlas')
	node.setObjectFile('atlas', atlasFile)
	#define
	defFile = node.getCacheFile('def')
	node.setObjectFile('def', defFile)
	proj = TilesetProject()
	proj.loadPSD(node.getAbsFilePath())
	absAtlas, absDef = node.getAbsObjectFile('atlas'), node.getAbsObjectFile('def')
	proj.save(absAtlas, absDef)
	#TODO: let texture library handle atlas
	pack = JSONHelper.tryLoadJSON(absDef)
	for item in pack['themes']:
		node.affirmChildNode(item['name'], 'named_tileset', manager=self)
	return True
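# The def file written by TilesetProject.save() is reloaded right away; the loop
# above only assumes pack['themes'] is a list of records carrying a 'name', each
# of which becomes a 'named_tileset' child node, roughly:
#
#   pack['themes'] = [ { 'name': 'grassland' }, { 'name': 'cave' } ]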
def importAsset(self, node, reload=False):
	try:
		with open(node.getAbsFilePath(), 'r', encoding='utf-8') as fp:
			# text = fp.read()
			data = orderedLoadYaml(fp)
	except Exception as e:
		logging.warn('failed to parse yaml:%s' % node.getPath())
		print(e)
		return False
	self.postDataLoad(node, data)
	cachePath = node.getCacheFile('data')
	if not JSONHelper.trySaveJSON(data, cachePath, 'yaml2json', sort_keys=False):
		logging.warn('failed saving yaml data to json: %s' % cachePath)
		return False
	node.assetType = 'data_yaml'
	node.setObjectFile('data', cachePath)
	return True
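#NOTE: orderedLoadYaml is not defined in this excerpt. A minimal sketch, assuming
#it is the usual PyYAML recipe that loads mappings into OrderedDict so that key
#order survives the yaml -> json conversion (hence sort_keys=False above); written
#under a hypothetical name, the project's real implementation may differ:
from collections import OrderedDict
import yaml

def _orderedLoadYamlSketch( stream, Loader = yaml.SafeLoader ):
	class _OrderedLoader( Loader ):
		pass
	def _constructMapping( loader, node ):
		#build an OrderedDict instead of a plain dict for every YAML mapping
		loader.flatten_mapping( node )
		return OrderedDict( loader.construct_pairs( node ) )
	_OrderedLoader.add_constructor(
		yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
		_constructMapping
	)
	return yaml.load( stream, _OrderedLoader )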
		k = row[0]
		v = row[1]
		if k:
			if k in output:
				logging.warn( 'duplicated key "%s" in Dict CSV: %s' % ( k, path ) )
			output[ k ] = v
	return output

def loadCSVAsList( path ):
	data = loadPlainCSV( path )
	if not data: return
	output = []
	header = data[ 0 ]
	for row in data[1:]:
		rowItem = {}
		for i, v in enumerate( row ):
			try:
				key = header[ i ]
				rowItem[ key ] = v
			except Exception as e:
				pass
		output.append( rowItem )
	return output

if __name__ == '__main__':
	from gii.core import AssetManager, AssetLibrary, getProjectPath, app, JSONHelper
	data = loadPlainCSV( 'test/test.csv' )
	# data = loadCSVAsList( 'test/test.csv' )
	JSONHelper.trySaveJSON( data, 'test/test_csv.json' )
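#Illustrative behaviour of loadCSVAsList for a hypothetical CSV file
#   name,atk
#   slime,1
#   bat,2
#the first row is taken as the header and every following row becomes a dict
#(values stay as read from the file):
#   [ { 'name': 'slime', 'atk': '1' }, { 'name': 'bat', 'atk': '2' } ]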