def writeFileMoverProcessFile(pipelineSpecs, processFrames):
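    """ Writes one file mover task file (JSON) per process.

        Expected inputs (inferred from the usage below):
          - pipelineSpecs["pipelineTools"]: dict of tools, each with "inputDir",
            "outputDir" and "parents".
          - pipelineSpecs["fileMover"]: settings with "fileMoverProcessFile"
            (a format string taking the process index) and the optional keys
            "linkAllTools" and "additionalTasks".
          - processFrames: per-process lists of frames, each frame carrying a
            "fileMover" task list.

        Each written file is a JSON list of task dicts, e.g. (illustrative):
          [ {"type": "makeDirs", "dir": "<path>"},
            {"type": "symlink", "from": "<outputDir>", "to": "<inputDir>/<tool>"} ]
    """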
    
    pipelineTools = pipelineSpecs["pipelineTools"]
    toolTasks = []
    
    # Set up all input/output folders for each tool
    for toolName, tool in pipelineTools.items():
        toolTasks.append({"type": "makeDirs", "dir": tool["outputDir"]})
        toolTasks.append({"type": "makeDirs", "dir": tool["inputDir"]})
    
    # Set up symlinks (link each tool's output folder into the input folders of its parent tools)
    if pipelineSpecs["fileMover"].get("linkAllTools", False):
        for toolName, tool in pipelineTools.items():
            for parentName in tool["parents"]:
                d = pipelineTools[parentName]["inputDir"]
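                # task: create a link named after this tool inside the parent's input dir,
                # pointing at this tool's output dir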
                toolTasks.append({"type": "symlink", "to": os.path.join(d, toolName), "from": tool["outputDir"]})
    
    # Append any additional user-defined tasks from the fileMover settings
    if "additionalTasks" in pipelineSpecs["fileMover"]:
        toolTasks += pipelineSpecs["fileMover"]["additionalTasks"]
    
    # Write one file mover task file per process
    for procIdx, fList in enumerate(processFrames):

        with open(pipelineSpecs["fileMover"]["fileMoverProcessFile"].format(procIdx), "w+") as o:

            # the tool tasks (dirs/links) apply to every process
            c = list(toolTasks)

            # add the file mover tasks of all frames of this process (recover, dependent, ...)
            for fr in fList:
                for m in fr["fileMover"]:
                    c.append(m)

            cF.jsonDump(c, o, indent=4)
        
    
    print("Wrote file mover per-process files for all ranks (local directory)")

def main():
    """ Consolidates {old validation file infos} with {new validation file infos}
        ==> outputs a combined, new file validation info.
    """
    
    parser = MyOptParser()
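    # Illustrative invocation (file and glob names are placeholders, not from the source):
    #   python <thisScript>.py --pipelineSpecs pipeline.json \
    #       --valFileInfoGlobNew "new/*.xml" --valFileInfoGlobOld "old/*.xml" \
    #       --statusFolder status -o validationInfoAll.xml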
    
    parser.add_argument("-s", "--searchDirNew", dest="searchDirNew",
            help="""The search directory in which to look for output files (.tiff, .exr, .rib.gz).""",
            metavar="<path>", default=None, required=False)
    
    
    parser.add_argument("--valFileInfoGlobNew", dest="valFileInfoGlobNew",
            help="""
            The globbing expression for all new input xmls with file status, which
            are consolidated into a new file info under --output. Files found and
            validated in --searchDirNew (if specified) are added to the set of new files.
            """, default=None, metavar="<glob>", required=False)
    
    parser.add_argument("--valFileInfoGlobOld", dest="valFileInfoGlobOld",
            help="""
            The globbing expression for all old input xmls with file status, which
            are consolidated with the new files into a combined file info under --output.
            """, default=None, metavar="<glob>", required=False)
    
    parser.add_argument("--pipelineSpecs", dest="pipelineSpecs", default="",
            help="""JSON file with info about the pipeline: pipelineTools,
                 fileValidationSpecs and fileValidationTools.""", metavar="<string>", required=True)
    
    parser.add_argument("--statusFolder", dest="statusFolder", default=None,
            help="""The output status folder, which will contain links to files
                 which are finished or can be recovered.""", metavar="<string>", required=False)
    
                                                       
    parser.add_argument("--validateOnlyLastModified", dest="validateOnlyLastModified", type=cF.toBool, default=True,
            help="""Only the file with the most recent modification time is validated; all others are set to finished.""", required=False)
                         

    parser.add_argument("-o", "--output", dest="output",
            help="""The output xml which is written; it provides validation info for each file found.""", metavar="<path>", required=True)
    
    
    try:
        
        print("====================== FileValidation ===========================")
        
        opts = AttrMap(vars(parser.parse_args()))
        if not opts.searchDirNew and not opts.valFileInfoGlobNew:
            raise ValueError("You need to define either searchDirNew or valFileInfoGlobNew!")
        
        if opts.valFileInfoGlobOld == "":
            opts.valFileInfoGlobOld = None
        
        print("searchDirNew: %s" % opts.searchDirNew)
        print("valFileInfoGlobNew: %s" % opts.valFileInfoGlobNew)
        print("valFileInfoGlobOld: %s" % opts.valFileInfoGlobOld)
        print("output: %s" % opts.output)
        
        
        d = cF.jsonLoad(opts.pipelineSpecs)
        pipelineTools = d["pipelineTools"]
        fileValidationSpecs = d["fileValidationSpecs"]
        fileValidationTools = d["fileValidationTools"]
        
        valDataAllNew = dict()
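        # old file paths superseded by a newer file with the same hash (collected below)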
        deleteFiles = []
        
        # load new validation data
        if opts.valFileInfoGlobNew is not None:
            print("Load new validation files")
            valDataAllNew, valFilesNew = loadValidationFiles(opts.valFileInfoGlobNew)
            
            preferGlobalPaths(valDataAllNew)
            
        
        # add searchDir files to new set
        # search files ============================================================================
        if opts.searchDirNew is not None:
            print("Validate all files in: %s with pipelineSpecs: %s" % (opts.searchDirNew, opts.pipelineSpecs))
            allFiles = searchFiles(opts.searchDirNew, opts, fileValidationSpecs,fileValidationTools,pipelineTools)
            for ha, f in allFiles.items():
                if ha in valDataAllNew:
                    print("WARNING: File %s already found in validation data set "
                          "from globbing expr. %s" % (f["absPath"], opts.valFileInfoGlobNew))
                else:
                    valDataAllNew[ha] = f
        # ===============================================================================================
        
        
        
        # load old validation data
        if opts.valFileInfoGlobOld is not None:
            print("Load old validation files")
            valDataAllOld, valFilesOld = loadValidationFiles(opts.valFileInfoGlobOld)
            preferGlobalPaths(valDataAllOld)
            
            # merge old validation infos into the new set
            for ha, valInfo in valDataAllOld.items():
              
                if ha not in valDataAllNew:
                    # this old file hash is not in our current list, so add it!
                    
                    # check whether absPath exists; otherwise try extending relPath
                    # with the directory of this validation file
                    if not os.path.exists(valInfo["absPath"]):
                        # note: the misspelled key "validatationInfoPath" is kept as-is
                        # (it is the key the validation data actually uses)
                        absPath = os.path.join(os.path.dirname(valInfo["validatationInfoPath"]), valInfo["relPath"])
                        if not os.path.exists(absPath):
                            print(valInfo["validatationInfoPath"])
                            raise NameError("File path in validation info file: %s does not exist; "
                                            "extended rel. path %s does not exist either!" % (valInfo["absPath"], absPath))
                        else:
                            print("Replacing nonexistent path %s with %s" % (valInfo["absPath"], absPath))
                            valInfo["absPath"] = absPath
                      
                    # copy element to new file info
                    valDataAllNew[ha] = valInfo
                else:
                    # we have the same hash in the new info
                    # take our new one which is better!
                    # delete old file if it is not linked to by new file

                    if os.path.realpath(valDataAllNew[ha]["absPath"]) != os.path.realpath(valInfo["absPath"]):
                        deleteFiles.append(valInfo["absPath"])
     

        # make final list
        finalFiles = list(valDataAllNew.values())
        
        printSummary(finalFiles, pipelineTools, False)
        
        print("Make output validation file")
        with open(opts.output, "w+") as f:
            cF.jsonDump(finalFiles, f, sort_keys=True)
        
        # Renew status folder, move over new xml info
        if opts.statusFolder is not None:
          
            print("Renew status folder:")
            finished = os.path.join(opts.statusFolder,"finished")
            recover = os.path.join(opts.statusFolder,"recover")
            
            cF.makeDirectory(finished, interact=False, defaultMakeEmpty=True)
            cF.makeDirectory(recover, interact=False, defaultMakeEmpty=True)
            # make symlinks for all files in the appropriate folder:
            paths = {"recover": recover, "finished": finished}           
            
            for f in finalFiles:
                h = f["hash"]
                p = os.path.relpath(f["absPath"],start=paths[f["status"]])
                filename = os.path.basename(p)
                head,ext = os.path.splitext(filename)
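                # the file hash is embedded in the link name, presumably to keep names unique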
                
                os.symlink(p, os.path.join(paths[f["status"]], head + "-uuid-" + h + ext))


        print("=================================================================")
        
    except Exception as e:
        print("====================================================================")
        print("Exception occurred: " + str(e))
        print("====================================================================")
        traceback.print_exc(file=sys.stdout)
        parser.print_help()
        return 1
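    # explicit success exit code (the exception path above returns 1)
    return 0


# Minimal entry-point guard: a sketch assuming this module is also run directly
# as a script ("sys" is expected to be imported at the top of the file).
if __name__ == "__main__":
    sys.exit(main())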