# Example 1
def loadMAF_parallel(conn, params):
    """Load alignments from a MAF file into the database using a worker pool.

    Splits the input MAF into one chunk file per thread, then maps
    ``loadMAF_worker`` over the chunks.  Workers share a Lock (installed
    via the pool initializer) to serialize their database INSERTs.
    Chunk files are always removed before returning.

    Note: ``conn`` is accepted for signature compatibility but is not
    referenced here; the database location is handed to workers via
    ``params.db``.
    """
    t = int(params.threads)
    numLoci = aln_file_tools.countMAF(params.alignment)
    if numLoci < 10000:
        print("\t\t\tReading", numLoci, "alignments.")
    else:
        print("\t\t\tReading", numLoci, "alignments... This may take a while.")

    # Split the MAF file into t chunk files, one per worker.
    file_list = aln_file_tools.maf_chunker(params.alignment, t, params.workdir)

    # Lock shared by all workers to serialize INSERTs into the database.
    lock = multiprocessing.Lock()
    try:
        # The with-statement terminates the pool even if map() raises,
        # so no close()/join() is needed (or valid) after this block.
        with multiprocessing.Pool(t, initializer=init,
                                  initargs=(lock, )) as pool:
            func = partial(loadMAF_worker, params.db, params.cov,
                           params.minlen, params.thresh, params.mask,
                           params.maf)
            pool.map(func, file_list)
            # Graceful shutdown: wait for workers to drain before exit.
            pool.close()
            pool.join()
    except Exception as e:
        # Report the failure instead of swallowing it silently.  (The
        # previous handler also touched `pool`, which is a NameError if
        # Pool construction itself failed.)
        print("\t\t\tWarning: error while loading MAF alignments:", e)
    finally:
        # Remove chunk files whether or not loading succeeded.
        aln_file_tools.removeChunks(params.workdir)
# Example 2
def loadLOCI_parallel(conn, params):
    """Load alignments from a .loci file into the database in parallel.

    Master:
        splits the file into t chunks
        creates a multiprocessing pool
    Workers (``loadLOCI_worker``):
        read a file chunk
        calculate a consensus
        grab the shared lock
        INSERT data into the SQL database
        release the lock

    Chunk files are always removed before returning.  ``conn`` is
    accepted for signature compatibility but is not referenced here; the
    database location is handed to workers via ``params.db``.
    """
    t = int(params.threads)

    numLoci = aln_file_tools.countLoci(params.loci)
    if numLoci < 10000:
        print("\t\t\tReading", numLoci, "alignments.")
    else:
        print("\t\t\tReading", numLoci, "alignments... This may take a while.")

    # Split the .loci file into t chunk files, one per worker.
    file_list = aln_file_tools.loci_chunker(params.loci, t, params.workdir)

    # Lock shared by all workers to serialize INSERTs into the database.
    lock = multiprocessing.Lock()
    try:
        # The with-statement terminates the pool even if map() raises,
        # so no close()/join() is needed (or valid) after this block.
        with multiprocessing.Pool(t, initializer=init,
                                  initargs=(lock, )) as pool:
            func = partial(loadLOCI_worker, params.db, params.cov,
                           params.minlen, params.thresh, params.mask,
                           params.maf)
            pool.map(func, file_list)
            # Graceful shutdown: wait for workers to drain before exit.
            pool.close()
            pool.join()
    except Exception as e:
        # Report the failure instead of swallowing it silently.  (The
        # previous handler also touched `pool`, which is a NameError if
        # Pool construction itself failed.)
        print("\t\t\tWarning: error while loading LOCI alignments:", e)
    finally:
        # Remove chunk files whether or not loading succeeded.
        aln_file_tools.removeChunks(params.workdir)
# Example 3
def loadBED_parallel(conn, params):
    """Load records from a BED file into the database using a worker pool.

    Splits the input into one chunk file per thread, then maps
    ``loadBED_worker`` over the chunks.  Workers share a Lock (installed
    via the pool initializer) to serialize their database INSERTs.
    Chunk files are always removed before returning.

    Note: ``conn`` is accepted for signature compatibility but is not
    referenced here; the database location is handed to workers via
    ``params.db``.
    """
    t = int(params.threads)

    # Split the BED file into t chunk files, one per worker.
    file_list = aln_file_tools.generic_chunker(params.bed, t, params.workdir)

    # Lock shared by all workers to serialize INSERTs into the database.
    lock = multiprocessing.Lock()
    try:
        # The with-statement terminates the pool even if map() raises,
        # so no close()/join() is needed (or valid) after this block.
        with multiprocessing.Pool(t, initializer=init,
                                  initargs=(lock, )) as pool:
            func = partial(loadBED_worker, params.db, params.bed_header)
            pool.map(func, file_list)
            # Graceful shutdown: wait for workers to drain before exit.
            pool.close()
            pool.join()
    except Exception as e:
        # Report the failure instead of swallowing it silently.  (The
        # previous handler also touched `pool`, which is a NameError if
        # Pool construction itself failed.)
        print("\t\t\tWarning: error while loading BED records:", e)
    finally:
        # Remove chunk files whether or not loading succeeded.
        aln_file_tools.removeChunks(params.workdir)