def callSAIWorkers(fastqDir, fastaPath, numThreads = 12):
    """Run one SAI-generation job per (fasta, fastq) pair through a process pool."""

    #collect all fastas against which to map the fastqs
    fastaFilePaths = workerForBam.getFastaFilePaths(fastaPath)
    print 'fastaFilePaths: ', fastaFilePaths

    #collect all fastqs to be mapped
    fastqFilePaths = workerForBam.getFastqFilePaths(fastqDir)
    print 'fastqFilePaths: ', fastqFilePaths

    #get the number of available processors
    numProcs = multiprocessing.cpu_count()
    print 'number of cpus: ', numProcs

    #compute the total number of jobs (one per fasta/fastq pair)
    numJobs = len(fastqFilePaths)*len(fastaFilePaths)

    #initialize the pool with one process per job, capped at the cpu count
    pool = Pool(processes=min(numJobs, numProcs))
    workersArgs = []

    #build one argument tuple per (fasta, fastq) pair
    for fastaFilePath in fastaFilePaths:
        for fastqPath in fastqFilePaths:
            workersArgs.append((fastaFilePath, fastqPath, numThreads))

    results = pool.map(callSAIWorker, workersArgs)
    pool.close()
    pool.join()
    return results
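
# callSAIWorker, the function mapped over the pool above, is defined elsewhere
# in this module. The helper below is only a hedged sketch of what such a
# worker is assumed to do for one (fasta, fastq, threads) tuple: run 'bwa aln'
# and write a .sai file. The helper name and the output naming scheme are
# illustrative assumptions, not the actual implementation.
def _exampleSAIWorker(args):
    import subprocess

    fastaFilePath, fastqPath, numThreads = args
    saiPath = fastqPath + '.sai'

    #run 'bwa aln' against the given reference and capture the .sai output
    with open(saiPath, 'w') as saiFile:
        returnCode = subprocess.call(['bwa', 'aln', '-t', str(numThreads),
                                      fastaFilePath, fastqPath], stdout=saiFile)
    return returnCode
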
def writeCombineBAMJobsFromSAI(outDir, fastqDir, fastaPath, lockDirPath):
    """Write one SAI-to-BAM conversion job per (fastq, fasta) pair and return the jobs file path."""
    combinedBAMJobsName = "combinedBAMFrmSingleSAIJob.jobs"
    combinedBAMJobsPath = os.path.join(outDir, combinedBAMJobsName)
    tools = workerForBam.getToolsDict()

    # collect all fastas against which to map the fastqs
    fastaFilePaths = workerForBam.getFastaFilePaths(fastaPath)

    # collect all fastqs to be mapped
    fastqFilePaths = workerForBam.getFastqFilePaths(fastqDir)

    print "fastaFilePaths: ", fastaFilePaths
    with open(combinedBAMJobsPath, "w") as combinedBAMJobsFile:
        for fastqPath in fastqFilePaths:
            for fastaFilePath in fastaFilePaths:
                workerForBam.writeSAIIToBAMJob(combinedBAMJobsFile, fastaFilePath, fastqPath, lockDirPath, tools)
    return combinedBAMJobsPath
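
# workerForBam.writeSAIIToBAMJob is not shown here; the helper below is only a
# hedged sketch of the kind of single-end command line such a job might
# describe (bwa samse piped into samtools). The exact job format and the
# output naming scheme are assumptions.
def _exampleSAIToBAMJobLine(fastaFilePath, fastqPath):
    saiPath = fastqPath + '.sai'
    bamPath = fastqPath + '.bam'

    #convert an existing .sai alignment into a BAM file
    return 'bwa samse %s %s %s | samtools view -bS - > %s' % (
        fastaFilePath, saiPath, fastqPath, bamPath)
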
def writeCombineSAIJobs(outDir, fastqDir, fastaPath, lockDirPath, numThreads):
    """Write one SAI-generation job per (fastq, fasta) pair and return the jobs file path."""
    combinedSAIJobsName = 'combinedSAIJob.jobs'
    combinedSAIJobsPath = os.path.join(outDir, combinedSAIJobsName)
    tools = workerForBam.getToolsDict()
    
    #collect all fastas against which to map the fastqs
    fastaFilePaths = workerForBam.getFastaFilePaths(fastaPath)

    #collect all fastqs to be mapped
    fastqFilePaths = workerForBam.getFastqFilePaths(fastqDir)
    
    print 'fastaFilePaths: ', fastaFilePaths
    with open(combinedSAIJobsPath, 'w') as combinedSAIJobsFile:
        for fastqPath in fastqFilePaths:
            for fastaFilePath in fastaFilePaths:
                workerForBam.writeSAIJob(combinedSAIJobsFile, fastaFilePath,
                                         fastqPath, lockDirPath, tools,
                                         numThreads)
    return combinedSAIJobsPath
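
# workerForBam.writeSAIJob is likewise not shown here; the helper below is
# only a hedged sketch of the kind of 'bwa aln' command a single SAI job is
# assumed to describe. Output naming is an assumption.
def _exampleSAIJobLine(fastaFilePath, fastqPath, numThreads):
    saiPath = fastqPath + '.sai'

    #align the fastq against the fasta and write the .sai file
    return 'bwa aln -t %d %s %s > %s' % (numThreads, fastaFilePath,
                                         fastqPath, saiPath)
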
def callSAIWorkersSerial(fastqDir, fastaPath, numThreads = 12):
    """Serial variant of callSAIWorkers: run the (fasta, fastq) jobs one at a
    time instead of through a process pool."""

    #collect all fastas against which to map the fastqs
    fastaFilePaths = workerForBam.getFastaFilePaths(fastaPath)
    print 'fastaFilePaths: ', fastaFilePaths

    #collect all fastqs to be mapped
    fastqFilePaths = workerForBam.getFastqFilePaths(fastqDir)
    print 'fastqFilePaths: ', fastqFilePaths

    #compute the total number of jobs (one per fasta/fastq pair)
    numJobs = len(fastqFilePaths)*len(fastaFilePaths)
    print 'number of jobs: ', numJobs

    sys.stdout.flush()
    
    #for each (fasta, fastq) pair run the worker directly and report the result
    for fastaFilePath in fastaFilePaths:
        for fastqPath in fastqFilePaths:
            result = callSAIWorker((fastaFilePath, fastqPath, numThreads))
            print result, (fastaFilePath, fastqPath, numThreads)
            sys.stdout.flush()
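
# Hedged usage example: one way these job writers might be driven. The
# directory and file paths below are placeholders for illustration only, not
# values used by the actual pipeline.
if __name__ == '__main__':
    exampleOutDir = '/tmp/bamJobs'
    exampleFastqDir = '/data/fastq'
    exampleFastaPath = '/data/ref/genomes.fa'
    exampleLockDir = '/tmp/locks'

    #write the per-pair SAI jobs and the SAI-to-BAM conversion jobs
    print writeCombineSAIJobs(exampleOutDir, exampleFastqDir, exampleFastaPath,
                              exampleLockDir, 12)
    print writeCombineBAMJobsFromSAI(exampleOutDir, exampleFastqDir,
                                     exampleFastaPath, exampleLockDir)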