import sys
import multiprocessing

import workerForBam
import parallelSNPsFinder


def main():

    logger = multiprocessing.log_to_stderr()
    logger.setLevel(multiprocessing.SUBDEBUG)

    if len(sys.argv) >= 5:
        #directory containing fastq library
        fastqsDir = workerForBam.getAbsPath(sys.argv[1])
        
        #directory containing other directories with fasta names
        fastaDir = workerForBam.getAbsPath(sys.argv[2])

        #directory containing file locks
        lockDirPath = workerForBam.getAbsPath(sys.argv[3])
        
        #directory containing temp output -> fastQ's, jobsFile 
        outDir = workerForBam.getAbsPath(sys.argv[4])

        #write processing jobs for all fastq's into the job file
        combineJobPath = writeCombineBAMJobsFromSAI(outDir, fastqsDir,
                                                    fastaDir, lockDirPath)

        #call workers to generate paired BAMs from SAIs
        #results = callPairedSAIToBAMWorkers(fastqsDir, fastaDir)
        #print results

    else:
        print 'err: missing arguments; expected <fastqsDir> <fastaDir> <lockDirPath> <outDir>'
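
#Every main() in this module normalizes its directory arguments through
#workerForBam.getAbsPath. A minimal sketch of what such a helper is
#assumed to look like (an assumption; the real code lives in workerForBam):
#
#    import os
#
#    def getAbsPath(path):
#        #expand '~' and resolve relative components to an absolute path
#        return os.path.abspath(os.path.expanduser(path))
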
def main():

    if len(sys.argv) >= 5:
        #directory containing fastq library
        fastqsDir = workerForBam.getAbsPath(sys.argv[1])
        
        #directory containing other directories with fasta names
        fastaDir = workerForBam.getAbsPath(sys.argv[2])

        #directory containing file locks
        lockDirPath = workerForBam.getAbsPath(sys.argv[3])
        
        #directory containing temp output -> fastQ's, jobsFile 
        outDir = workerForBam.getAbsPath(sys.argv[4])
        
        #now, over the combined BAMs for all scaffolds, look for SNPs
        snpsFinder(fastaDir, outDir, lockDirPath)
        
    else:
        print 'err: missing arguments; expected <fastqsDir> <fastaDir> <lockDirPath> <outDir>'
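
#Expected invocation for these entry points (argument order follows the
#sys.argv reads above; the script name is a placeholder, not the real one):
#
#    python <script>.py <fastqsDir> <fastaDir> <lockDirPath> <outDir> [numThreads]
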
def main():

    if len(sys.argv) >= 5:
        #directory containing fastq library
        fastqsDir = workerForBam.getAbsPath(sys.argv[1])
        
        #directory containing other directories with fasta names
        fastaDir = workerForBam.getAbsPath(sys.argv[2])

        #directory containing file locks
        lockDirPath = workerForBam.getAbsPath(sys.argv[3])
        
        #directory containing temp output -> fastQ's, jobsFile 
        outDir = workerForBam.getAbsPath(sys.argv[4])

        #initialize default value of threads
        numThreads = 12
        #optional fifth argument overrides the thread count
        if len(sys.argv) >= 6:
            try:
                numThreads = int(sys.argv[5])
            except ValueError:
                print 'warn: invalid thread count, using default:', numThreads

        #write processing jobs for all fastq's into the combine job file
        combineJobPath = writeCombineSAIJobs(outDir, fastqsDir, fastaDir,
                                             lockDirPath, numThreads)
        print 'combine jobs path: ', combineJobPath

        #write a serial job script covering all fastq's
        combineJobPath = writeCombineSerialSAIJobs(outDir, fastqsDir,
                                                   fastaDir, numThreads)
        print 'combine serial jobs script: ', combineJobPath

        
        #call workers to generate SAIs
        #callSAIWorkers(fastqsDir, fastaDir, numThreads)

    else:
        print 'err: missing arguments; expected <fastqsDir> <fastaDir> <lockDirPath> <outDir> [numThreads]'
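
#writeCombineSAIJobs and its siblings are assumed to emit one shell command
#per fastq into a jobs file under outDir, which a parallel runner later
#consumes line by line. A minimal sketch of that pattern (an assumption,
#not this repo's actual writer):
#
#    import os
#
#    def writeJobsFile(outDir, fastqsDir, cmdFormat):
#        jobsPath = os.path.join(outDir, 'jobs.txt')
#        with open(jobsPath, 'w') as jobsFile:
#            for name in sorted(os.listdir(fastqsDir)):
#                #one shell command per fastq, one per line
#                jobsFile.write(cmdFormat % os.path.join(fastqsDir, name) + '\n')
#        return jobsPath
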
def main():

    logger = multiprocessing.log_to_stderr()
    logger.setLevel(multiprocessing.SUBDEBUG)

    if len(sys.argv) >= 5:
        #directory containing fastq library
        fastqsDir = workerForBam.getAbsPath(sys.argv[1])
        
        #directory containing other directories with fasta names
        fastaDir = workerForBam.getAbsPath(sys.argv[2])

        #directory containing file locks
        lockDirPath = workerForBam.getAbsPath(sys.argv[3])
        
        #directory containing temp output -> fastQ's, jobsFile 
        outDir = workerForBam.getAbsPath(sys.argv[4])

        #initialize default value of threads
        numThreads = 12
        #optional fifth argument overrides the thread count
        if len(sys.argv) >= 6:
            try:
                numThreads = int(sys.argv[5])
            except ValueError:
                print 'warn: invalid thread count, using default:', numThreads

        #write processing jobs for all fastq's into the combine job file
        combineJobPath = writeCombineSAIJobs(outDir, fastqsDir, fastaDir,
                                             lockDirPath, numThreads)

        #call workers to generate SAIs
        #results = callSAIWorkers(fastqsDir, fastaDir, numThreads)

        #print results
        
    else:
        print 'err: missing arguments; expected <fastqsDir> <fastaDir> <lockDirPath> <outDir> [numThreads]'
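
#lockDirPath suggests cooperating workers serialize access through lock
#files. One common pattern, sketched here as an assumption (not this
#repo's locking code), is an atomic create-or-fail on a per-resource file:
#
#    import errno
#    import os
#
#    def tryAcquireLock(lockDirPath, name):
#        lockPath = os.path.join(lockDirPath, name + '.lock')
#        try:
#            #O_CREAT|O_EXCL is atomic: the open fails if the lock exists
#            os.close(os.open(lockPath, os.O_CREAT | os.O_EXCL))
#            return True
#        except OSError, e:
#            if e.errno == errno.EEXIST:
#                return False
#            raise
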
def main():

    logger = multiprocessing.log_to_stderr()
    logger.setLevel(multiprocessing.SUBDEBUG)

    if len(sys.argv) >= 5:
        #directory containing fastq library
        fastqsDir = workerForBam.getAbsPath(sys.argv[1])
        
        #directory containing other directories with fasta names
        fastaDir = workerForBam.getAbsPath(sys.argv[2])

        #directory containing file locks
        lockDirPath = workerForBam.getAbsPath(sys.argv[3])
        
        #directory containing temp output -> fastQ's, jobsFile 
        outDir = workerForBam.getAbsPath(sys.argv[4])

        #write all fastq's processing in job file
        #combineJobPath = writeCombineFastqJobs(outDir, fastqsDir, fastaDir,\
        #    lockDirPath)


        tools = workerForBam.getToolsDict()
        """retcode = workerForBam.callParallelDrone(combineJobPath,
                                                    tools['PARALLEL_DRONE'])

        if retcode != 0:
            #error occurred while calling parallel drone
            print "parallel drone erred while executing the combined jobs"
            return -1
        """
        #now, over the combined BAMs for all scaffolds, look for SNPs
        parallelSNPsFinder.snpsFinder(fastaDir, outDir, lockDirPath)
        
    else:
        print 'err: missing arguments; expected <fastqsDir> <fastaDir> <lockDirPath> <outDir>'
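
#The commented block above shows the expected contract of
#workerForBam.callParallelDrone: it takes the jobs-file path plus the drone
#executable's path and returns the drone's exit code. A minimal sketch
#under that assumption (not the repo's actual implementation):
#
#    import subprocess
#
#    def callParallelDrone(jobsFilePath, dronePath):
#        #run the drone over the jobs file and surface its exit status
#        return subprocess.call([dronePath, jobsFilePath])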