Example #1
def functionsGoNOW(sampleNames, path, runTrimMetadata, commands):
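    """Run the assembly quality-check functions"""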
    print "\nPerforming quality checks on assemblies."
    quastList = quastProcesses(sampleNames, path, runTrimMetadata, commands)
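    # Fill the run metadata with the QUAST results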
    quastMeta = metadataFiller.filler(runTrimMetadata, quastList)
    runTrimAssemblyMetadata = quastMetadata(sampleNames, path, quastMeta)
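    # Write the collection-level JSON report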
    jsonReportR.jsonR(sampleNames, path, runTrimAssemblyMetadata, "Collection")
    return runTrimAssemblyMetadata
Example #2
def functionsGoNOW(correctedFiles, path, metadata, fLength, commands):
    """Run the helper function"""
    print("\nAssembling reads.")
    flagMetadataList = spadesPrepProcesses(correctedFiles, path, fLength, metadata, commands)
    flagMetadata = metadataFiller.filler(metadata, flagMetadataList)
    updatedMetadata = contigFileFormatter(correctedFiles, path, flagMetadata)
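    # Run completionist to determine which files were successfully assembled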
    assembledFiles = completionist(correctedFiles, path)
    jsonReportR.jsonR(correctedFiles, path, updatedMetadata, "Collection")
    return updatedMetadata, assembledFiles
Example #3
def functionsGoNOW(sampleNames, path, runTrimMetadata, commands):
    """Calls all the functions in a way that they can be multi-processed"""
    inputData = referenceFiletoAssembly(path, sampleNames)
    print "\nSampling fastq files."
    sampleMeta = sampleFastq(path, sampleNames, runTrimMetadata, commands)
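    # Index the reference targets prior to mapping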
    indexList = indexTargetsProcesses(path, inputData, sampleMeta, commands)
    indexMeta = metadataFiller.filler(runTrimMetadata, indexList)
    # Start the mapping operations
    mappingList = mappingProcesses(path, inputData, indexMeta, commands)
    mappingMeta = metadataFiller.filler(runTrimMetadata, mappingList)
    extractingList = extractingProcesses(path, inputData, mappingMeta, commands)
    extractingMeta = metadataFiller.filler(runTrimMetadata, extractingList)
    graphingList = graphingProcesses(path, inputData, extractingMeta, commands)
    graphingMeta = metadataFiller.filler(runTrimMetadata, graphingList)
    os.chdir(path)
    runTrimInsertMetadata = formatOutput(path, sampleNames, graphingMeta)
    jsonReportR.jsonR(sampleNames, path, runTrimInsertMetadata, "Collection")
    return runTrimInsertMetadata
Example #4
def functionsGoNow(files, path, metadata, fLength, commands):
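    """Run the helper functions"""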
    # print path, fLength
    commandList = spadesPrepProcesses(files, path, fLength, metadata, commands)
    commandMetadata = metadataFiller.filler(metadata, commandList)
    updatedMetadata = contigFileFormatter(files, path, commandMetadata)
    assembledFiles = completionist(files, path)
    # moreMetadata = pipelineMetadata(path, updatedMetadata, assembledFiles)
    jsonReportR.jsonR(files, path, updatedMetadata, "Collection")
    return updatedMetadata, assembledFiles
Example #5
def functionsGoNOW(assembledFiles, path, assemblyMetadata, refFilePath, commands):
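    """Run the GeneSeekr analysis functions"""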
    print "\nPerforming GeneSeekr analysis"
    # Clear out any summary reports from a previous iteration of the pipeline
    reportRemover(path)
    # Do everything - uniVec screening, geneSeeking, V-typing, and MLST analysis
    geneSeekrMetadataList = geneSeekrPrepProcesses(assembledFiles, path, assemblyMetadata, refFilePath, commands)
    # print json.dumps(geneSeekrMetadata, sort_keys=True, indent=4, separators=(',', ': '))
    geneSeekrMetadata = metadataFiller.filler(assemblyMetadata, geneSeekrMetadataList)
    jsonReportR.jsonR(assembledFiles, path, geneSeekrMetadata, "Collection")
    return geneSeekrMetadata
Example #6
def functionsGoNOW(sampleNames, path, runMetadata, fLength, commands):
    """Run the functions"""
    print('\nPerforming error correction on fastq files.')
    # Removed the multiprocessing aspect of this function - it seemed to be unreliable.
    # Sometimes, fastq files with more data would not be corrected.
    os.chdir(path)
    print "Preparing fastq files for processing"
    prepList = quakePrepProcesses(sampleNames, path, fLength, runMetadata, commands)
    prepMetadata = metadataFiller.filler(runMetadata, prepList)
    print "Determining cut-off values for error correction"
    cutoffList = quakeCutOffProcesses(sampleNames, path, prepMetadata, commands)
    cutoffMetadata = metadataFiller.filler(prepMetadata, cutoffList)
    print "Correcting errors"
    correctList = quakeCorrectProcesses(sampleNames, path, fLength, cutoffMetadata, commands)
    correctMetadata = metadataFiller.filler(runMetadata, correctList)
    # runQuake(sampleNames, path)
    os.chdir(path)
    # Run completionist to determine unprocessable files, and acquire metadata
    runTrimMetadata, correctedList = completionist(sampleNames, path, correctMetadata, fLength)
    # Clean up tmp files
    tmpFileRemover(path, correctedList)
    # Return important variables
    jsonReportR.jsonR(correctedList, path, runTrimMetadata, "Collection")
    return correctedList, runTrimMetadata