def getPyFileDependentPyFiles(pyFile, recursive=False, searchScripts=[], recursiveExploredFiles=[]):
    # returns the .py files from the search scripts that the given file imports.
    # with recursive=True the dependencies of each dependency are collected as well.
    dependentFiles = []
    if len(searchScripts) == 0:
        searchScripts = main.getLibs() + main.getScripts() + main.getTools()
    depImports = getPyFileDependentImports(pyFile, searchScripts=searchScripts)
    for di in depImports:
        # keep only the imported names: strip the "import"/"from ... import" prefix and any "as" alias
        importContent = re.sub(".*import (.*)", r"\1", di)
        importContent = re.sub("(.*) as.*", r"\1", importContent)
        singleImports = importContent.split(", ")
        for singleImport in singleImports:
            # for dotted imports only the last module name is searched, with the .py extension added
            singleImport = singleImport.split(".")
            singleImport = singleImport[len(singleImport) - 1]
            singleImport += ".py"
            for searchScript in searchScripts:
                if (singleImport in searchScript) and (searchScript not in recursiveExploredFiles):
                    if recursive:
                        recursiveDependents = getPyFileDependentPyFiles(searchScript, recursive=recursive, searchScripts=searchScripts,
                                                                        recursiveExploredFiles=(recursiveExploredFiles + [searchScript]))
                        for rd in recursiveDependents:
                            if rd not in dependentFiles:
                                dependentFiles.append(rd)
                    if searchScript not in dependentFiles:
                        dependentFiles.append(searchScript)
    return dependentFiles
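
# usage sketch (illustrative; "C:/pipeline/scripts/myTool.py" is a hypothetical path that would
# have to be one of the files returned by main.getLibs()/getScripts()/getTools()):
#
#   deps = getPyFileDependentPyFiles("C:/pipeline/scripts/myTool.py", recursive=True)
#   # deps -> every search-script .py file reachable through the import chain of myTool.py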
def packPyFile(pyFile, packFolder=None, searchScripts=[], remove_previous=False):
    print("\nSearching -> " + pyFile)
    if len(searchScripts) == 0:
        searchScripts = main.getLibs() + main.getScripts() + main.getTools()
    if os.path.exists(pyFile) and pyFile in searchScripts:
        print("Found in scripts list -> " + pyFile)
        packName = os.path.splitext(ntpath.basename(pyFile))[0]
        # first of all we need a pack folder. If not provided, a default one is calculated.
        if not packFolder:
            packFolder = getPackFolder(pyFile)
            print("Pack folder not provided. Using default -> " + packFolder)
        # if a clean pack is needed, the old one is deleted
        if os.path.exists(packFolder) and remove_previous:
            shutil.rmtree(packFolder)
        # if the pack folder doesn't exist, it is created
        if not os.path.exists(packFolder):
            os.makedirs(packFolder)
            print("Pack folder creation -> " + packFolder)
        # if everything is ok the packing starts
        if os.path.exists(packFolder):
            print("Pack folder exists -> " + packFolder)
            packFile = packFolder + "/" + packName + ".py"
            print("Compiling file content -> " + packFile)
            compiledString = getPyFileCompiledString(pyFile, searchScripts=searchScripts)
            print("Writing file content -> " + packFile)
            with open(packFile, 'w') as fout:
                fout.write(compiledString)
            depFiles = [pyFile] + inspector.getPyFileDependentPyFiles(pyFile, recursive=True, searchScripts=searchScripts)
            print("Packing UI files for -> " + packFile)
            packUIfiles(depFiles, packFolder)
            print("Packing dependent files for -> " + packFile)
            packDependentFiles(depFiles, packFolder)
            packInitFile = packFolder + "/__init__.py"
            print("Creating __init__ file -> " + packInitFile)
            with open(packInitFile, 'w') as fout:
                fout.write("")
            print("Process Finished -> " + packFile)
    else:
        print("Not found in scripts list -> " + pyFile)
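
# usage sketch (illustrative; the paths are hypothetical and must belong to the scripts list):
#
#   packPyFile("C:/pipeline/scripts/myTool.py", packFolder="D:/packs/myTool", remove_previous=True)
#   # writes D:/packs/myTool/myTool.py (the compiled single file), copies its .ui and other
#   # dependent files next to it, and creates an empty __init__.py in the pack folder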
def getPyFileNonDependentImports(pyFile, recursive=False, searchScripts=[], nameOnly=False, recursiveExploredFiles=[]):
    # returns the import lines of the given file that do NOT resolve to one of the search scripts,
    # i.e. external imports that must be kept verbatim when the file is compiled into a single script.
    nonDependentImports = []
    if len(searchScripts) == 0:
        searchScripts = main.getLibs() + main.getScripts() + main.getTools()
    importLines = getPyFileImports(pyFile)
    for importLine in importLines:
        importContent = re.sub(".*import (.*)", r"\1", importLine)
        importContent = re.sub("(.*) as.*", r"\1", importContent)
        singleImports = importContent.split(", ")
        for singleImport in singleImports:
            singleImport = singleImport.split(".")
            singleImport = singleImport[len(singleImport) - 1]
            singleImport += ".py"
            found = False
            for searchScript in searchScripts:
                if (singleImport in searchScript) and (searchScript not in recursiveExploredFiles):
                    found = True
                    if recursive:
                        recursiveNonDependents = getPyFileNonDependentImports(searchScript, recursive=recursive, searchScripts=searchScripts,
                                                                              recursiveExploredFiles=(recursiveExploredFiles + [searchScript]))
                        for rd in recursiveNonDependents:
                            if rd not in nonDependentImports:
                                nonDependentImports.append(rd)
            if not found and importLine not in nonDependentImports:
                nonDependentImports.append(importLine)
    if nameOnly:
        # keep only the imported names (or the alias if "as" is used) instead of the full import lines
        defNonDepImports = []
        for i in range(len(nonDependentImports)):
            if " as " in nonDependentImports[i]:
                importParts = nonDependentImports[i].split(" ")
                defNonDepImports.append(importParts[len(importParts) - 1])
            else:
                importContent = re.sub(".*import (.*)", r"\1", nonDependentImports[i])
                singleImports = importContent.split(", ")
                for simp in singleImports:
                    defNonDepImports.append(simp)
        nonDependentImports = defNonDepImports
    return nonDependentImports
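
# usage sketch (illustrative; the path is hypothetical):
#
#   extImports = getPyFileNonDependentImports("C:/pipeline/scripts/myTool.py", recursive=True)
#   # extImports -> the import lines (e.g. "import os", "import maya.cmds as cmds") that do not
#   # resolve to a search script and therefore must be kept in the compiled file
#   extNames = getPyFileNonDependentImports("C:/pipeline/scripts/myTool.py", recursive=True, nameOnly=True)
#   # extNames -> only the imported names/aliases, e.g. ["os", "cmds"]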
def getPyFileBlocks(pyFile, blockTag, level=0, recursive=False, searchScripts=[], ignoreNotUsed=False, onlyNames=False, recursiveExploredFiles=[]):
    # extracts the code blocks starting with blockTag (e.g. "def " or "class ") found at the given indentation level of the file.
    pyFileBlocks = []
    with open(pyFile, "r") as f:
        fileData = f.read()
        f.seek(0, 0)
        fileLines = f.readlines()
    if recursive:
        if len(searchScripts) == 0:
            searchScripts = main.getLibs() + main.getScripts() + main.getTools()
        depFiles = getPyFileDependentPyFiles(pyFile, searchScripts=searchScripts)
        for df in depFiles:
            if df not in recursiveExploredFiles:
                subFileBlocks = getPyFileBlocks(df, blockTag, level=level, recursive=True, searchScripts=searchScripts,
                                                ignoreNotUsed=ignoreNotUsed, onlyNames=False,
                                                recursiveExploredFiles=(recursiveExploredFiles + [df]))
                if not ignoreNotUsed:
                    for sfb in subFileBlocks:
                        if sfb not in pyFileBlocks:
                            pyFileBlocks.append(sfb)
                else:
                    # when unused blocks must be ignored, only the dependency blocks actually referenced by this file are kept
                    subFileBlocksNames = getPyFileBlocks(df, blockTag, level=level, recursive=True, searchScripts=searchScripts,
                                                         ignoreNotUsed=ignoreNotUsed, onlyNames=True,
                                                         recursiveExploredFiles=(recursiveExploredFiles + [df]))
                    subFileBlocksAncestors = getPyFileBlocksParents(subFileBlocksNames, subFileBlocks, fileData, lastAncestors=True)
                    for i in range(len(subFileBlocks)):
                        if "pyFile" in subFileBlocksAncestors[i]:
                            pyFileBlocks.append(subFileBlocks[i])
    state = "searchTag"
    currentCompilation = ""
    for line in fileLines:
        jumpNextLine = False
        # strip one indentation step per requested level (a tab or four spaces)
        for i in range(level):
            if line.startswith("\t"):
                line = line[1:]
            elif line.startswith("    "):
                line = line[4:]
            else:
                # the line is shallower than the requested level, so it closes any block being collected
                if state == "searchTagContent":
                    if line != "\n":
                        state = "searchTag"
                        pyFileBlocks.append(currentCompilation)
                        currentCompilation = ""
                jumpNextLine = True
        if jumpNextLine:
            continue
        if state == "searchTagContent":
            # indented lines and blank lines belong to the current block; anything else closes it
            if line.startswith("\t") or line.startswith(" ") or line == "\n":
                currentCompilation += line
            else:
                state = "searchTag"
                pyFileBlocks.append(currentCompilation)
                currentCompilation = ""
        if state == "searchTag":
            if line.startswith(blockTag):
                currentCompilation += line
                state = "searchTagContent"
                jumpNextLine = True
        if jumpNextLine:
            continue
    if onlyNames:
        for i in range(len(pyFileBlocks)):
            pyFileBlocks[i] = getBlockName(pyFileBlocks[i])
    return pyFileBlocks
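
# usage sketch (illustrative; the path is hypothetical):
#
#   defs = getPyFileBlocks("C:/pipeline/scripts/myTool.py", "def ", level=0)
#   # defs -> the full source text of every top-level function of the file
#   classNames = getPyFileBlocks("C:/pipeline/scripts/myTool.py", "class ", level=0, onlyNames=True)
#   # classNames -> only the names of the top-level classes (extracted via getBlockName)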
def getPyFileCompiledString(pyFile, searchScripts=[]):
    compiledString = ""
    packName = os.path.splitext(ntpath.basename(pyFile))[0]
    if len(searchScripts) == 0:
        searchScripts = main.getLibs() + main.getScripts() + main.getTools()

    # imports section
    compiledString += "#######################################\n"
    compiledString += "# imports\n\n"
    # gets all non dependent imports and adds them to the future string
    noDepImports = inspector.getPyFileNonDependentImports(pyFile, recursive=True, searchScripts=searchScripts)
    noDepImports.sort()
    for imp in noDepImports:
        compiledString += imp + "\n"

    # gets all file dependent imports. They are needed to make replacements later
    depImports = inspector.getPyFileDependentImports(pyFile, recursive=True, searchScripts=searchScripts, nameOnly=True)

    # gets all first level definitions of the file and its dependent files.
    # only definitions that are actually used should be added
    definitions = inspector.getPyFileDefinitions(pyFile, level=0, recursive=True, searchScripts=searchScripts, ignoreNotUsed=False)
    definitionsNames = inspector.getPyFileDefinitions(pyFile, level=0, recursive=True, searchScripts=searchScripts, ignoreNotUsed=False, onlyNames=True)

    # gets all first level classes of the file and its dependent files to include them if they are used
    classes = inspector.getPyFileClasses(pyFile, level=0, recursive=True, searchScripts=searchScripts, ignoreNotUsed=False)
    classesNames = inspector.getPyFileClasses(pyFile, level=0, recursive=True, searchScripts=searchScripts, ignoreNotUsed=False, onlyNames=True)

    # now it is time to discard unused definitions and classes and keep only the dependent ones
    usedDefinitions = []
    usedDefinitionsNames = []
    with open(pyFile, "r") as f:
        fileData = f.read()
    definitionsParents = inspector.getPyFileBlocksParents(definitionsNames, (definitions + classes), fileData, lastAncestors=True)
    for i in range(len(definitions)):
        if "pyFile" in definitionsParents[i]:
            usedDefinitions.append(definitions[i])
            usedDefinitionsNames.append(definitionsNames[i])

    # definitions section
    compiledString += "\n#######################################\n"
    compiledString += "# definitions\n\n"
    # definitions process. Adds the used definitions
    for i in range(len(usedDefinitions)):
        compiledString += usedDefinitions[i] + "\n"

    # classes section
    compiledString += "\n#######################################\n"
    compiledString += "# classes\n\n"
    # classes process. Adds the classes
    for i in range(len(classes)):
        compiledString += classes[i] + "\n"

    # after adding the definitions and classes, calls must be updated from module.definition() to definition()
    for i in range(len(definitionsNames)):
        regx = re.compile((r"(.*\." + definitionsNames[i] + r"\(.*)"), re.MULTILINE)
        matches = regx.findall(compiledString)
        if len(matches) > 0:
            for match in matches:
                regx2 = re.compile((r".*(?:\s)(.*\." + definitionsNames[i] + r")\(.*"), re.MULTILINE)
                defCallMatches = regx2.findall(match)
                defCallSplit = defCallMatches[len(defCallMatches) - 1].split("(")
                defCall = defCallSplit[len(defCallSplit) - 1]
                defCallParts = re.split(r"\.|=", defCall)
                defCallLib = defCallParts[len(defCallParts) - 2]
                defCallDef = defCallParts[len(defCallParts) - 1]
                # only strip the module prefix when it refers to one of the packed dependent imports
                if defCallLib in depImports:
                    compiledString = compiledString.replace((defCallLib + "." + defCallDef), defCallDef)

    # adds the execution string
    compiledString += "\n#######################################\n"
    compiledString += "# execution\n"
    compiledString += 'if __name__ == "__main__": ' + packName + 'Run()'
    return compiledString
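
# usage sketch (illustrative; the path is hypothetical):
#
#   compiled = getPyFileCompiledString("C:/pipeline/scripts/myTool.py")
#   # "compiled" is a single self-contained source string organised in four sections:
#   # imports (external only), definitions, classes and execution, the last one calling myToolRun()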
def packPyFileToModuleRecursive(pyFile, packFolder=None, searchScripts=[], mainFile=False):
    packedFiles = []
    if packFolder is None:
        packFolder = getPackFolder(pyFile)
    if len(searchScripts) == 0:
        searchScripts = main.getLibs() + main.getScripts() + main.getTools()
    # if the file exists and is in the search scripts, process it
    if os.path.exists(pyFile) and pyFile in searchScripts:
        # open the file and read all the lines
        with open(pyFile, "r") as f:
            fileData = f.read()
            f.seek(0, 0)
            fileLines = f.readlines()
        packImportHead = os.path.basename(os.path.normpath(packFolder))
        # get all the import statements to substitute them with the new packed ones
        pyFileImports = inspector.getPyFileImports(pyFile)
        for importLine in pyFileImports:
            # we only need the last imported module name to know if it is in the searchScripts
            imports = re.sub("from .* import ", "", importLine)
            imports = re.sub("import ", "", imports)
            imports = re.sub(" as .*", "", imports)
            imports = imports.replace(" ", "")
            # there could be more than one import in each line, so we split it and loop over them
            lineImports = imports.split(",")
            for singleImport in lineImports:
                # it could be a module.submodule.subsubmodule import; in that case only the last part is checked against the searchScripts
                singleImportParts = singleImport.split(".")
                if len(singleImportParts) > 0:
                    lastImport = singleImportParts[len(singleImportParts) - 1]
                    # adds the .py extension to search for the script in the searchScripts
                    lastImportPy = lastImport + ".py"
                    # if the import resolves to a search script, that file is packed too and the current import line is rewritten
                    importFile = [s for s in searchScripts if lastImportPy in s]
                    if len(importFile) > 0:
                        packedFiles += packPyFileToModuleRecursive(importFile[0], packFolder=packFolder, searchScripts=searchScripts)
                        if (importLine + "\n") in fileLines:
                            newImport = importLine.replace((singleImport.replace(lastImport, "")), (packImportHead + "."))
                            fileData = fileData.replace(importLine, newImport)
        packedPyFile = packFolder + "/" + os.path.splitext(ntpath.basename(pyFile))[0] + ".py"
        if mainFile:
            packedPyFile = packedPyFile.replace(".py", "Launcher.py")
        with open(packedPyFile, 'w') as fout:
            fout.write(fileData)
        packedFiles.append(packedPyFile)
    return packedFiles
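
# usage sketch (illustrative; the paths are hypothetical):
#
#   packed = packPyFileToModuleRecursive("C:/pipeline/scripts/myTool.py",
#                                        packFolder="D:/packs/myTool/scripts/myTool", mainFile=True)
#   # packed -> the copied .py files; the entry file is renamed to myToolLauncher.py and every
#   # import of another search script is rewritten to point to the pack folder name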
def packPyFileToModule(pyFile, packFolder=None, searchScripts=[], remove_previous=False):
    print("\nSearching -> " + pyFile)
    if len(searchScripts) == 0:
        searchScripts = main.getLibs() + main.getScripts() + main.getTools()
    if os.path.exists(pyFile) and pyFile in searchScripts:
        print("Found in scripts list -> " + pyFile)
        packName = os.path.splitext(ntpath.basename(pyFile))[0]
        # first of all we need a pack folder. If not provided, a default one is calculated.
        if not packFolder:
            packFolder = getPackFolder(pyFile)
            print("Pack folder not provided. Using default -> " + packFolder)
        packFolderScripts = packFolder + "/" + packName + "/scripts/" + packName
        # if a clean pack is needed, the old one is deleted
        if os.path.exists(packFolder) and remove_previous:
            shutil.rmtree(packFolder)
        # if the pack folders don't exist, they are created
        if not os.path.exists(packFolder) or not os.path.exists(packFolderScripts):
            os.makedirs(packFolderScripts)
            print("Pack folder creation -> " + packFolderScripts)
        # if everything is ok the packing starts
        if os.path.exists(packFolderScripts):
            print("Pack folder exists -> " + packFolderScripts)
            # packs the .py files
            print("Packing .py files...")
            packedPyFiles = packPyFileToModuleRecursive(pyFile, packFolder=packFolderScripts, searchScripts=searchScripts, mainFile=True)
            print("Packed .py files:")
            for pf in packedPyFiles:
                print("    " + pf)
            # packs the .ui files
            print("Packing .ui files...")
            packedUIfiles = packUIfiles(packedPyFiles, packFolderScripts)
            print("Packed .ui files:")
            for puf in packedUIfiles:
                print("    " + puf)
            # packs all other possible dependent files
            print("Packing other dependent files...")
            packedDepFiles = packDependentFiles(packedPyFiles, packFolderScripts)
            print("Packed dependent files:")
            for pdep in packedDepFiles:
                print("    " + pdep)
            # creates the __init__ file for the scripts folder so it can be imported as a package
            packScriptsInitFile = packFolderScripts + "/__init__.py"
            with open(packScriptsInitFile, 'w') as fout:
                fout.write("")
            print("Pack scripts __init__ file creation -> " + packScriptsInitFile)
            # the pack needs a .mod file so Maya can load it as a module at startup
            modFile = packFolder + "/" + packName + ".mod"
            print("Creating pack mod file...")
            with open(modFile, 'w') as fout:
                fout.write("+ " + packName + " 1.0.0 ../modules/pack/" + packName + "\n")
            print("Created pack mod file -> " + modFile)
            # creates the plug-ins folder for the pack and the plugin file to be loaded by Maya
            packFolderPlugins = packFolder + "/" + packName + "/plug-ins"
            if not os.path.exists(packFolderPlugins):
                os.makedirs(packFolderPlugins)
                print("Pack plug-ins folder creation -> " + packFolderPlugins)
            if os.path.exists(packFolderPlugins):
                pluginFile = packFolderPlugins + "/" + packName + ".py"
                print("Creating pack plugin file...")
                createPackPluginFile(pluginFile)
                print("Created pack plugin file -> " + pluginFile)
    else:
        print("Not found in scripts list -> " + pyFile)
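
# usage sketch (illustrative; the paths are hypothetical):
#
#   packPyFileToModule("C:/pipeline/scripts/myTool.py", packFolder="D:/packs", remove_previous=True)
#   # produces D:/packs/myTool.mod plus D:/packs/myTool/scripts/myTool (packed .py, .ui and
#   # dependent files, with __init__.py) and D:/packs/myTool/plug-ins/myTool.py for Maya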