def configJobInternal(config):
    # Get the cluster config values. The "Cluster" section is dropped by the parser,
    # so it is added again below.
    # Note: AttrMap, iH, os, sys, configuratorModuleDir and configuratorModulePath
    # are expected to be defined at module level elsewhere in this script.
    d = AttrMap(config)

    # Remove the "config" value and convert boolean flags back to strings;
    # this config is used only with strings in the generator.
    d.pop("config")
    config = AttrMap({"Cluster": d})

    # Add the module path of this file (usable in the .ini files)
    # and the execution directory of submit.py.
    config["General"] = {
        "configuratorModuleDir": configuratorModuleDir,
        "configuratorModulePath": configuratorModulePath,
        "currentWorkDir": os.getcwd(),
        "jobDir": os.path.dirname(os.path.abspath(config.Cluster.jobGeneratorConfig)),
        "configureScriptArgs": " ".join(sys.argv)
    }

    config.Cluster.jobGeneratorOutputDir = os.path.abspath(config.Cluster.jobGeneratorOutputDir)

    # Load the generator module dynamically and start the job generator:
    # import the class given by the Cluster.jobGenerator string.
    module, generatorClass = iH.importClassFromModuleString(config.Cluster.jobGenerator, verbose=True)
    gen = generatorClass(config)
    gen.generate()

    return 0
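# Illustrative usage only (a minimal sketch, not part of the original script):
# configJobInternal expects a dict-like config whose keys become the "Cluster"
# section. The keys shown below mirror the attributes accessed above; the
# generator string and the paths are hypothetical placeholders.
#
#   exampleConfig = {
#       "config": "ClusterJob.ini",                        # popped again before use
#       "jobGenerator": "jobGenerators.JobGeneratorMPI",   # "module.Class" string (hypothetical)
#       "jobGeneratorConfig": "Generator.ini",
#       "jobGeneratorOutputDir": "./clusterJobs"
#   }
#   configJobInternal(exampleConfig)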
def main():

    parser = MyOptParser()

    parser.add_argument("--pipelineSpecs", dest="pipelineSpecs", default="",
                        help="""Json file with info about the pipeline, fileValidation, fileValidationTools.""",
                        metavar="<path>", required=True)

    parser.add_argument("--validationFileInfo", dest="validationFileInfo", default="",
                        help="""XML file with info about render output files.""",
                        metavar="<path>", required=False)

    parser.add_argument("-p", "--processes", type=int, dest="processes", default=1,
                        help="The number of processes for the cluster render",
                        metavar="<integer>", required=True)

    try:
        print("================== Prepare for Cluster Pipeline Job ==================")

        opts = AttrMap(vars(parser.parse_args()))

        pipelineSpecs = cF.jsonLoad(opts.pipelineSpecs)
        pipelineTools = pipelineSpecs["pipelineTools"]

        # tool1 ---> tool2 ---> tool3
        # tool3 has a dependency on tool2; seen from tool2, tool3 is a parent.
        # Define parents and dependencies for all tools
        # (a minimal standalone example of this wiring follows after main()).
        for toolName, tool in pipelineTools.items():
            if "dependencies" not in tool:
                tool["dependencies"] = set()
            tool["parents"] = set()

        for toolName, tool in pipelineTools.items():
            for dep in tool["dependencies"]:
                t = pipelineTools[dep]
                t["parents"].add(toolName)

        frameGenerator = pipelineSpecs["frameGenerator"]
        # fileValidationSpecs = d["fileValidationSpecs"]
        # fileValidationTools = d["fileValidationTools"]

        # Important job modules to hand over to frameGenerators and processFileWriters
        importantModules = {"importHelpers": iH,
                            "commonFunctions": cF,
                            "getSimFileInfos": getSimFileInfos}

        # Generate frames =====================================================
        mod, frameGenerator["generator"] = iH.importClassFromModuleString(frameGenerator["generator"])
        # Hand over some modules to the frame generator!
        fgen = frameGenerator["generator"](pipelineSpecs, jobGenModules=importantModules)
        allFrames, framesPerIdx, framesToDistribute = fgen(**frameGenerator["arguments"])
        # =====================================================================

        # Format frames =======================================================
        # Format strings in all settings (if possible) in allFrames again with itself
        for i, fr in enumerate(allFrames):
            allFrames[i] = cF.formatAll(fr, fr, formatter=StrExpr)

        # Filter frames =======================================================
        recoverFrames(opts, allFrames, framesPerIdx, pipelineTools)
        # =====================================================================

        # Make a list of all frames which are not completely finished, i.e. frames
        # where at least one tool with no parent (a final tool of the pipeline) has
        # not finished (see the sketch after main()).
        notcompleted = lambda frame: sum(
            1 if frame["tools"][toolName]["status"] != STATUS_FINISHED else 0
            for toolName, tool in pipelineTools.items() if len(tool["parents"]) == 0) > 0

        framesCount = len(allFrames)
        allFrames = list(filter(notcompleted, allFrames))
        framesToDistribute = list(filter(notcompleted, framesToDistribute))
        print("Removed %d finished frames!" % (framesCount - len(allFrames)))
        # Count the number of frames to compute
        totalFrames = len(framesToDistribute)
        print("Number of frames to compute %i" % totalFrames)

        if totalFrames == 0:
            print("No frames to distribute -> exit")
            return 0

        # Distribute the frames over the number of processes ==================
        processFrames = distributeFrames(opts, framesToDistribute)
        # =====================================================================

        # Write, for each tool in the pipeline, one process file per process
        for toolName, tool in pipelineTools.items():

            # Load the class and module for the tool's processFileGenerator
            print("Load processFileGenerator for tool: %s" % toolName)
            mod, tool["processFileGenerator"]["generator"] = iH.importClassFromModuleString(tool["processFileGenerator"]["generator"])
            tool["processFileGenerator"]["generator"](pipelineSpecs, jobGenModules=importantModules).write(
                processFrames, **tool["processFileGenerator"]["arguments"])

            # If the tool has an info file generator, produce that output too
            if "infoFileGenerator" in tool:
                print("Load infoFileGenerator for tool: %s" % toolName)
                mod, tool["infoFileGenerator"]["generator"] = iH.importClassFromModuleString(tool["infoFileGenerator"]["generator"])
                tool["infoFileGenerator"]["generator"](pipelineSpecs, jobGenModules=importantModules).write(
                    processFrames, **tool["infoFileGenerator"]["arguments"])

        # Write the FileMover process file =====================================
        writeFileMoverProcessFile(pipelineSpecs, processFrames)
        # =====================================================================

        return 0

    except Exception as e:
        print("====================================================================")
        print("Exception occurred: " + str(e))
        print("====================================================================")
        traceback.print_exc(file=sys.stdout)
        parser.print_help()
        return 1
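# The two helper functions below are illustrative sketches only and are not part
# of the original script; they are never called by main(). Tool names, statuses
# and frame contents are hypothetical. They mirror, in isolation, the dependency
# wiring and the "notcompleted" filter used in main() above.

def _exampleParentWiring():
    # "parents" is the inverse of "dependencies": if toolName depends on dep,
    # then toolName is registered as a parent of dep.
    pipelineTools = {
        "tool1": {},
        "tool2": {"dependencies": {"tool1"}},
        "tool3": {"dependencies": {"tool2"}},
    }
    # Ensure both sets exist on every tool
    for tool in pipelineTools.values():
        tool.setdefault("dependencies", set())
        tool["parents"] = set()
    # Invert the dependency edges
    for toolName, tool in pipelineTools.items():
        for dep in tool["dependencies"]:
            pipelineTools[dep]["parents"].add(toolName)
    assert pipelineTools["tool1"]["parents"] == {"tool2"}
    assert pipelineTools["tool2"]["parents"] == {"tool3"}
    assert pipelineTools["tool3"]["parents"] == set()   # tool3 is a final tool

def _exampleNotCompletedFilter():
    # A frame is kept when at least one tool without parents (a final tool of
    # the pipeline) has not reached STATUS_FINISHED. The status values here are
    # placeholder strings.
    STATUS_FINISHED = "finished"
    pipelineTools = {
        "render":  {"parents": {"convert"}},   # "convert" depends on "render"
        "convert": {"parents": set()},         # final tool, no parents
    }
    frame = {"tools": {"render":  {"status": STATUS_FINISHED},
                       "convert": {"status": "initialized"}}}
    notcompleted = lambda f: sum(
        1 if f["tools"][name]["status"] != STATUS_FINISHED else 0
        for name, tool in pipelineTools.items() if len(tool["parents"]) == 0) > 0
    assert notcompleted(frame)   # "convert" is not finished -> frame is kept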