def main():

    try: 
        parser = MyOptParser()
        parser.add_argument("-p","--processFile", dest="processFile", default="" ,
                help="""Json file with process description.""", metavar="<path>", required=True)    
        
        
        opts = AttrMap(vars(parser.parse_args()))
          
        if opts.processFile:

            processFile = cF.jsonLoad(opts.processFile)

            # load process module/class
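            # "processClass" is expanded as keyword arguments for the import
            # helper; assumed shape (key names illustrative):
            #   "processClass": {"moduleName": "...", "className": "..."}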
            mod, ProcessClass = iH.importClassFromModule( **processFile["processClass"] )

            process = ProcessClass( processFile, jobGenModules = {"importHelpers":iH, "commonFunctions" : cF} )
            process.doProcessing()
           
        return 0
            
    except Exception as e:
        sys.stdout.flush()
        print("====================================================================", file=sys.stderr)
        print("Exception occured here: " + str(e), file=sys.stderr)
        print("====================================================================", file=sys.stderr)
        traceback.print_exc(file=sys.stderr)
        return 1
def main():
    
    """ {old validatation file infos}  is compared 
        to { new validation file infos}  ==> outputs new file validation info
    """
    
    parser = MyOptParser()
    
    parser.add_argument("-s", "--searchDirNew", dest="searchDirNew",
            help="""This is the search directory where it is looked for output files (.tiff,.exr,.rib.gz). """, 
            metavar="<path>", default=None, required=False)
    
    
    parser.add_argument("--valFileInfoGlobNew", dest="valFileInfoGlobNew",
            help="""
            The globbing expression for all input xmls with file status which 
            are consolidated into a new file info under --output. The found and validated files in --searchDir (if specified) are
            added to the set of new files.
            """, default=None, metavar="<glob>", required=False)
    
    parser.add_argument("--valFileInfoGlobOld", dest="valFileInfoGlobOld",
            help="""
            The globbing expression for all old input xmls with file status which 
            are consolidated with the new files into a combined file info under --output.
            """, default=None, metavar="<glob>", required=False)
    
    parser.add_argument("--pipelineSpecs", dest="pipelineSpecs", default="",
            help="""Json file with info about the pipeline, fileValidation, fileValidationTools.                 
                 """, metavar="<string>", required=True)
    
    parser.add_argument("--statusFolder", dest="statusFolder", default=None,
            help="""The output status folder which contains links to files which are finished, or can be recovered.                
                 """, metavar="<string>", required=False)
    
                                                       
    parser.add_argument("--validateOnlyLastModified", dest="validateOnlyLastModified", type=cF.toBool, default=True,
            help="""The file with the moset recent modified time is only validated, all others are set to finished!.""", required=False)
                         

    parser.add_argument("-o", "--output", dest="output",
            help="""The output xml which is written, which proivides validation info for each file found""", metavar="<path>", required=True)
    
    
    try:
        
        print("====================== FileValidation ===========================")
        
        opts = AttrMap(vars(parser.parse_args()))
        if not opts.searchDirNew and not opts.valFileInfoGlobNew:
            raise ValueError("You need to define either searchDirNew or valFileInfoGlobNew!")
        
        if opts.valFileInfoGlobOld == "":
            opts.valFileInfoGlobOld = None
        
        print("searchDir: %s" % opts.searchDirNew)
        print("valFileInfoGlobNew: %s" % opts.valFileInfoGlobNew)
        print("valFileInfoGlobOld: %s" % opts.valFileInfoGlobOld)
        print("output: %s" % opts.output)
        
        
        d = cF.jsonLoad(opts.pipelineSpecs)
        pipelineTools = d["pipelineTools"]
        fileValidationSpecs = d["fileValidationSpecs"]
        fileValidationTools = d["fileValidationTools"]
        
        valDataAllNew = dict()
        deleteFiles = []
        
        # load new validation data
        if opts.valFileInfoGlobNew is not None:
            print("Load new validation files")
            valDataAllNew, valFilesNew = loadValidationFiles(opts.valFileInfoGlobNew)
            
            preferGlobalPaths(valDataAllNew)
            
        
        # add searchDir files to new set
        # search files ============================================================================
        if opts.searchDirNew is not None:
            print("Validate all files in: %s with pipeLineSpecs: %s" % (opts.searchDirNew , opts.pipelineSpecs) )
            allFiles = searchFiles(opts.searchDirNew, opts, fileValidationSpecs,fileValidationTools,pipelineTools)
            for ha, f in allFiles.items():
              if ha in valDataAllNew:
                  print("""WARNING: File %s already found in validation data set 
                           from globbing expr. %s """ % (f["absPath"], opts.valFileInfoGlobNew))
              else:
                valDataAllNew[ha] = f
        # ===============================================================================================
        
        
        
        # load old validation data
        if opts.valFileInfoGlobOld is not None:
            print("Load old validation files")
            valDataAllOld, valFilesOld = loadValidationFiles(opts.valFileInfoGlobOld)
            preferGlobalPaths(valDataAllOld)

            # merge the old validation info into the new set
            for ha, valInfo in valDataAllOld.items():
              
                if ha not in valDataAllNew:
                    # this old file hash is not in our current list, so add it!

                    # check if absPath exists; otherwise try to extend the relPath
                    # with the directory of this validation file
                    if not os.path.exists(valInfo["absPath"]):
                        absPath = os.path.join(os.path.dirname(valInfo["validatationInfoPath"]), valInfo["relPath"])
                        if not os.path.exists(absPath):
                            print(valInfo["validatationInfoPath"])
                            raise NameError("""File path %s in validation info file does not exist,
                                               and the extended relative path %s does not exist either!"""
                                            % (valInfo["absPath"], absPath))
                        else:
                            print("Replacing missing path %s with %s" % (valInfo["absPath"], absPath))
                            valInfo["absPath"] = absPath

                    # copy the element to the new file info
                    valDataAllNew[ha] = valInfo
                else:
                    # the same hash exists in the new info:
                    # keep the new entry (it is more up to date), and
                    # delete the old file if the new file does not point to it
                    if os.path.realpath(valDataAllNew[ha]["absPath"]) != os.path.realpath(valInfo["absPath"]):
                        deleteFiles.append(valInfo["absPath"])
     

        # make the final list
        finalFiles = list(valDataAllNew.values())

        printSummary(finalFiles, pipelineTools, False)
        
        print("Make output validation file")
        with open(opts.output, "w+") as f:
            cF.jsonDump(finalFiles, f, sort_keys=True)
        
        # Renew status folder, move over new xml info
        if opts.statusFolder is not None:
          
            print("Renew status folder:")
            finished = os.path.join(opts.statusFolder,"finished")
            recover = os.path.join(opts.statusFolder,"recover")
            
            cF.makeDirectory(finished,interact=False, defaultMakeEmpty=True)
            cF.makeDirectory(recover ,interact=False, defaultMakeEmpty=True)
            # make symlinks for all files in the appropriate folder:
            paths = {"recover": recover, "finished": finished}           
            
            for f in finalFiles:
                h = f["hash"]
                p = os.path.relpath(f["absPath"],start=paths[f["status"]])
                filename = os.path.basename(p)
                head,ext = os.path.splitext(filename)
                
                os.symlink(p, os.path.join(paths[f["status"]], head + "-uuid-" + h + ext))


        print("=================================================================")
        
    except Exception as e:
        print("====================================================================")
        print("Exception occured: " + str(e))
        print("====================================================================")
        traceback.print_exc(file=sys.stdout)
        parser.print_help()
        return 1
def main():
    
    global subProcess, shutDown
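    # subProcess and shutDown are module-level state; shutDown is presumably
    # toggled by a signal handler elsewhere to stop the render loop gracefully
    # (assumption: the handler is not shown in this snippet)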
    
    parser = MyOptParser()
    
    try:
        
        parser.add_argument("-c", "--renderCommand", dest="renderCommand", default="prman -t:1" ,
                help="""The render command to call.""", metavar="<path>", required=True)         
        
        parser.add_argument("-p", "--processFile", dest="processFile", default="./RenderProcess.xml" ,
                help="""Input file xml path with a list of files for rendering""", metavar="<path>", required=True)
        
           
        opts = AttrMap(vars(parser.parse_args()))
      
        print("================== Rendering Frames =========================")
        print("Script: " + __file__ )
        
        if opts.processFile:
            frames = cF.jsonLoad(opts.processFile)

            if not frames:
                raise ValueError("No frames specified in file %s" % opts.processFile)
            
            for f in frames:

                if shutDown:
                    break

                print("Render frame: %s " % f["inputFile"])
                command = opts.renderCommand.split(" ") + [f["inputFile"]]
                print("Command: %s" % " ".join(command))
                sys.stdout.flush()

                try:
                    subProcess = subprocess.Popen(command, stderr=subprocess.STDOUT)
                    # wait for the render process to finish
                    subProcess.wait()
                    subProcess = None

                except Exception as c:
                    raise NameError("Rendering process error for file: %s, error: %s" % (f, c))
                  
        if shutDown:
            print("Render Loop shutdown")
        
        print("============================================================")        
            

    except Exception as e:
      
        print("====================================================================", file=sys.stderr)
        print("Exception occured: " + str(e), file=sys.stderr)
        print("====================================================================", file=sys.stderr)
        traceback.print_exc(file=sys.stderr)
        return 1
    
    print("================== Rendering finished ==================== ")
    
    return 0
def main():
         
    
    parser = MyOptParser()

    parser.add_argument("--pipelineSpecs", dest="pipelineSpecs", default="" ,
            help="""Json file with info about the pipeline, fileValidation, fileValidationTools.""", metavar="<path>", required=True)    
    
    parser.add_argument("--validationFileInfo", dest="validationFileInfo", default="" ,
            help="""XML file with info about render output files.""", metavar="<path>", required=False)
                                                                
    parser.add_argument("-p", "--processes", type=int, dest="processes", default=int(1),
            help="The number of processes for the cluster render", metavar="<integer>", required=True)
    
            
    try:
        
        print("================== Prepare for Cluster Pipeline Job============")
        
        opts = AttrMap(vars(parser.parse_args()))
        
        pipelineSpecs = cF.jsonLoad(opts.pipelineSpecs)
        

        pipelineTools = pipelineSpecs["pipelineTools"]
        
        # tool1 ---> tool2 ---> tool3
        #                       : tool3 depends on tool2
        #            : tool3 is a parent of tool2
                                 
        # define parents and dependencies for all tools
        for toolName, tool in pipelineTools.items():
            if "dependencies" not in tool:
                tool["dependencies"] = set()

            tool["parents"] = set()
            
        for toolName, tool in pipelineTools.items():
            for dep in tool["dependencies"]:
                t = pipelineTools[dep]
                t["parents"].add(toolName)

        
        frameGenerator = pipelineSpecs["frameGenerator"]
#        fileValidationSpecs = pipelineSpecs["fileValidationSpecs"]
#        fileValidationTools = pipelineSpecs["fileValidationTools"]
        
        # Important job modules to hand over to frameGenerators and processFileWriters
        importantModules = {"importHelpers":iH, "commonFunctions" : cF, "getSimFileInfos" : getSimFileInfos}
        
        # Generate Frames =====================================================
        mod, frameGenerator["generator"] = iH.importClassFromModuleString(frameGenerator["generator"])
        # hand over some modules to the frame generator!
        fgen = frameGenerator["generator"](pipelineSpecs, jobGenModules =  importantModules )
        allFrames,framesPerIdx, framesToDistribute = fgen(**frameGenerator["arguments"])
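        # assumed semantics of the return values: allFrames holds every generated
        # frame, framesPerIdx the same frames keyed by frame index, and
        # framesToDistribute the subset that is spread over the processes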
        # =====================================================================
        
            
        # Format frames ============================================================
        # format all strings in each frame's settings (where possible) with the
        # frame itself as the substitution context
        for i, fr in enumerate(allFrames):
            allFrames[i] = cF.formatAll(fr, fr, formatter=StrExpr)
        
        # Filter Frames =======================================================
        recoverFrames(opts,allFrames,framesPerIdx,pipelineTools)
        #======================================================================
                  
        # make a list of all frames which are not completely finished
        # (a frame is unfinished if at least one terminal tool, i.e. a tool
        # with no parents, has a status other than finished)
        notcompleted = lambda frame: any(frame["tools"][toolName]["status"] != STATUS_FINISHED
                                         for toolName, tool in pipelineTools.items() if len(tool["parents"]) == 0)
        framesCount = len(allFrames)
        allFrames = list(filter(notcompleted, allFrames))
        framesToDistribute = list(filter(notcompleted, framesToDistribute))
        print("Removed %d finished frames!" % (framesCount - len(allFrames)))
        

        # count the number of frames to compute
        totalFrames = len(framesToDistribute)
        print("Number of frames to compute: %i" % totalFrames)
        if totalFrames == 0:
            print("No frames to distribute -> exit")
            return 0
        
        # Distribute the frames over the number of processes ===================
        processFrames = distributeFrames(opts,framesToDistribute)
        #======================================================================

        
        # Write the process file for each tool in the pipeline, a separate one for each process
        for toolName, tool in pipelineTools.items():
            
            # load the class and module for the tools processFileWriter
            print("Load processFileGenerator for tool: %s" % toolName )
            mod, tool["processFileGenerator"]["generator"] = iH.importClassFromModuleString(tool["processFileGenerator"]["generator"])
            tool["processFileGenerator"]["generator"](pipelineSpecs, jobGenModules = importantModules).write(processFrames, **tool["processFileGenerator"]["arguments"])
            
            # if an info file generator is configured, produce its output
            
            if "infoFileGenerator" in tool:
                print("Load infoFileGenerator for tool: %s" % toolName )
                mod, tool["infoFileGenerator"]["generator"] = iH.importClassFromModuleString(tool["infoFileGenerator"]["generator"])
                tool["infoFileGenerator"]["generator"](pipelineSpecs, jobGenModules = importantModules).write(processFrames, **tool["infoFileGenerator"]["arguments"])
            
        
        # Write FileMover process file  =======================================
        writeFileMoverProcessFile(pipelineSpecs,processFrames)
        # =====================================================================
        return 0
         
    except Exception as e:
        print("====================================================================")
        print("Exception occured: " + str(e))
        print("====================================================================")
        traceback.print_exc(file=sys.stdout)
        parser.print_help()
        return 1
def recoverFrames(opts,allFrames,framesPerIdx, pipelineTools):
    
    
    def addFile(frame,file,parent=None):
        
        if "usedFile" in file and file["usedFile"] :
            #this file has already been used
            return
        
        if file["status"]==STATUS_RECOVER:
            
            print("added File: %s (recover)" % file["relPath"])
            # add a file move to recover this file
            frame["fileMover"].append(file["fileMoveRecover"])
            # mark file as used
            file["usedFile"] = True
            
        elif file["status"]==STATUS_FINISHED:
            if parent:
                print("added File: %s (finished, dependent)" % file["relPath"])
                # add a file move to recover this file
                frame["fileMover"].append(file["fileMoveDependent"])
                #print("id", id(frame), frame["fileMover"] )
                # mark file as used
                file["usedFile"] = True

    def addTool(frame, toolName, visitedTools, parentToolName=None):

        if toolName in visitedTools:
            return

        visitedTools.add(toolName)
        frameTool = frame["tools"][toolName]

        # if the tool is not finished
        if frameTool["status"] != STATUS_FINISHED:

            # add all of this tool's checkpoint files
            for outFileProp in frameTool["outputFiles"]:

                if outFileProp["cpFile"] is not None:
                    addFile(frame, outFileProp["cpFile"], parentToolName)

            # recurse into all dependent tools
            depTools = pipelineTools[toolName]["dependencies"]
            for depTool in depTools:
                addTool(frame, depTool, visitedTools, toolName)

        else:
            # this tool is finished, but if we have a parent tool (which is
            # always unfinished), we add our finished checkpoint files
            if parentToolName:
                # add all checkpoint files of the output files
                for outFileProp in frameTool["outputFiles"]:

                    if outFileProp["cpFile"] is None:
                        raise ValueError("""Trying to add nonexistent checkpoint file of output file %s in tool
                        %s!""" % (str(outFileProp), toolName))

                    addFile(frame, outFileProp["cpFile"], parentToolName)

            # if no parent is given, don't do anything

    # get all file info
    if opts.validationFileInfo:
        print("Setup recovery from file info===============================")
        print("Using validation file: %s", opts.validationFileInfo)
        checkpointFiles = cF.jsonLoad(opts.validationFileInfo);
        
        cpFiles = { "hash": {}, "all" : []}
        
        for f in checkpointFiles:
            tool = f["tool"]
            frameIdx = int(f["frameIdx"])
            ha = f["hash"]

            cpFiles["all"].append(f)

            if ha in cpFiles["hash"]:
                raise ValueError("File %s and %s have the same hash!" % (f["absPath"], cpFiles["hash"][ha]["absPath"]))
            else:
                cpFiles["hash"][ha] = f

        print("===========================================================")
        
        print("Determine status of all tools =============================")
        # go over all frames and, for each tool, match the checkpoint files
        for frameIdx, frame in framesPerIdx.items():
            
            for toolName,tool in frame["tools"].items():
                
                # if there are checkpoint files corresponding to outputfiles of this tool
                finishedOutFiles = 0
                for outFileProp in tool["outputFiles"]:

                    ha =  cF.makeUUID(outFileProp["hashString"])
                    
                    
                    outFileProp["hash"] = ha
                    if ha in cpFiles["hash"]: # we found checkpoint file
                        cpFile = cpFiles["hash"][ha]
                        absP = cpFile["absPath"]
                        
                        print("Frame: %i " % frameIdx + 
                        " checkpoint file matched:\n\t%s\n\thash: %s\n\tstatus: %s " % ( absP[:10]+'...'+absP[-20:] if len(absP) > 70 else absP ,ha, cpFile["status"] ))
                        
                        outFileProp["cpFile"] = cpFile
                        
                        if outFileProp["cpFile"]["status"] == STATUS_FINISHED:
                            finishedOutFiles += 1
                            
                    else:
                        outFileProp["cpFile"] = None
                    

                # if all output files are finished -> the tool is finished
                if finishedOutFiles == len(tool["outputFiles"]):
                    tool["status"] = STATUS_FINISHED
                    print("Tool: %s -> finished" % toolName)
                
        
        #print("Dependency check===========================================")
        
        
        ## for each frameIdx file list, 
        ## travel dependency of each file and if some files are missing       
        ## silently remove this file from the cpFilesPerFrame because this frameIdx can not be recovered!
        
        #invalidFrameIdx = set() # list for not recoverable frames! (should not happen)
        #for frameIdx,frame in framesPerIdx.items():
            
            #if frameIdx not in cpFilesPerFrame:
                #continue
            #stack = cpFilesPerFrame[frameIdx]["all"][:] # shallow copy (remove files from stack)
            #invalidFrame = walkDependencies(stack,cpFilesPerFrame[frameIdx])
            
            #if invalidFrame:
                #print("Invalid frameIdx: %i for recovery!" % frameIdx)
                #invalidFrameIdx.add(frameIdx)
                ## continue to next frame
                #continue
        ##endfor
                
        ## remove all files from all tools for invalid frames
        #for k in invalidFrameIdx:
            #for toolName,tool in framesPerIdx[k].items():
                #if toolName in pipelineTools.keys():
                    #tool["checkpointFiles"] = []
                
        #print("===========================================================")
        
        
        # set up recovery for all frames
        print("Setup pipeline tools with file info ========================")
        for frame in allFrames:

            # walk all tools in the pipeline (visit each only once!)
            for tool in pipelineTools.keys():
                addTool(frame, tool, set())
                       

        print("===============================================================")
def main():
         
    
    parser = MyOptParser()
    
    try:

        parser.add_argument("-p", "--processFile", dest="processFile", default="./FileMove.xml" ,
                help="""Input .json file with a list of files for moving into place""", metavar="<path>", required=True)
        opts = AttrMap(vars(parser.parse_args()))
      
        print("========================= Move Files ===========================")
        
        if opts.processFile:
            tasks = cF.jsonLoad(opts.processFile)
            for task in tasks:
                try:
                    
                    # avoid links to links: get the real path, resolving all
                    # symlinks (whether this is necessary is not known)
                    if "from" in task:
                        task["realfrom"] = os.path.realpath(task["from"])
                    
                    print("Task:", task)

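                    # fallback chain: try a hardlink first; if that fails,
                    # fall back to a symlink, and finally to a plain copy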
                    if task["type"] == "hardlink":
                        try:
                            os.link(task["realfrom"],task["to"] )
                            continue
                        except:
                          task["type"] ="symlink"
                           
                    if task["type"] == "symlink":
                        try:
                            os.symlink(task["realfrom"],task["to"] )
                            continue
                        except:
                            task["type"] ="copy"
                    
                    if task["type"] == "copy":
                        shutil.copy2(task["from"],task["to"])
                        continue
                        
                    if task["type"] == "makeDirs" :
                        os.makedirs(task["dir"],exist_ok=True)
                        continue
                    
                    
                    raise ValueError("Type: %s not supported!" % str(task))
                    
                except Exception as err:
                    raise ValueError("Could not execute task: %s" % str(task)) from err
                    
        print("================================================================")  

    except Exception as e:
        print("====================================================================")
        print("Exception occured: " + str(e))
        print("====================================================================")
        traceback.print_exc(file=sys.stdout)
        return 1

    return 0