def test_write_chunks(self):
    """Write a list of PipelineChunks to JSON and confirm they all load back."""
    prefix = PipelineChunk.CHUNK_KEY_PREFIX

    def make_datum(i):
        # Datum dict keyed with the required chunk-key prefix.
        movie_key = "{c}movie_fofn_id".format(c=prefix)
        region_key = "{c}region_fofn_id".format(c=prefix)
        return {movie_key: "/path/to_movie-{i}.fofn".format(i=i),
                region_key: "/path/rgn_{i}.fofn".format(i=i)}

    def make_chunk(i):
        return PipelineChunk("chunk-id-{i}".format(i=i), **make_datum(i))

    nchunks = 5
    pipeline_chunks = [make_chunk(i) for i in range(nchunks)]
    log.debug(pipeline_chunks)

    tmp_dir = get_temp_dir("pipeline-chunks")
    tmp_name = get_temp_file("_chunk.json", tmp_dir)
    write_pipeline_chunks(pipeline_chunks, tmp_name, "Example chunk file")

    # Round-trip: every chunk written must load back.
    pchunks = load_pipeline_chunks_from_json(tmp_name)
    assert len(pchunks) == nchunks
def __args_gather_runner(func, chunk_json, output_file, chunk_key):
    """Gather the files referenced by *chunk_key* in a chunk JSON into one output.

    :param func: gather function invoked as ``func(files, output_file)``
    :param chunk_json: path to a pipeline-chunk JSON file
    :param output_file: path the gathered output is written to
    :param chunk_key: datum key, with or without the ``$chunk.`` prefix
    :return: 0 on success (exit-code convention)
    """
    chunks = load_pipeline_chunks_from_json(chunk_json)

    # Allow looseness: accept keys supplied without the '$chunk.' prefix.
    if not chunk_key.startswith('$chunk.'):
        chunk_key = '$chunk.' + chunk_key
        # log.warn is a deprecated alias; use log.warning
        log.warning("Prepending chunk key with '$chunk.' to '{c}'".format(c=chunk_key))

    fastx_files = _get_datum_from_chunks_by_chunk_key(chunks, chunk_key)
    func(fastx_files, output_file)
    return 0
def run_main(chunk_json, fasta_output, chunk_key):
    """Gather FASTA chunk files listed in a chunk JSON into one FASTA file.

    :param chunk_json: path to a pipeline-chunk JSON file
    :param fasta_output: path of the combined FASTA output
    :param chunk_key: datum key, with or without the ``$chunk.`` prefix
    :return: 0 on success (exit-code convention)
    """
    chunks = load_pipeline_chunks_from_json(chunk_json)

    # Allow looseness: accept keys supplied without the '$chunk.' prefix.
    if not chunk_key.startswith('$chunk.'):
        chunk_key = '$chunk.' + chunk_key
        # log.warn is a deprecated alias; use log.warning
        log.warning("Prepending chunk key with '$chunk.' to '{c}'".format(c=chunk_key))

    fastx_files = get_datum_from_chunks_by_chunk_key(chunks, chunk_key)
    gather_fasta(fastx_files, fasta_output)
    return 0
def __args_gather_runner(func, chunk_json, output_file, chunk_key):
    """Gather the files referenced by *chunk_key* in a chunk JSON into one output.

    :param func: gather function invoked as ``func(files, output_file)``
    :param chunk_json: path to a pipeline-chunk JSON file
    :param output_file: path the gathered output is written to
    :param chunk_key: datum key, with or without the ``$chunk.`` prefix
    :return: 0 on success (exit-code convention)
    """
    chunks = load_pipeline_chunks_from_json(chunk_json)

    # Allow looseness: accept keys supplied without the '$chunk.' prefix.
    # (The original no-op 'else: chunk_key = chunk_key' branch was removed.)
    if not chunk_key.startswith("$chunk."):
        chunk_key = "$chunk." + chunk_key
        # log.warn is a deprecated alias; use log.warning
        log.warning("Prepending chunk key with '$chunk.' to '{c}'".format(c=chunk_key))

    fastx_files = _get_datum_from_chunks_by_chunk_key(chunks, chunk_key)
    func(fastx_files, output_file)
    return 0
def run_main(chunk_json, fasta_output, chunk_key):
    """Gather FASTA chunk files listed in a chunk JSON into one FASTA file.

    :param chunk_json: path to a pipeline-chunk JSON file
    :param fasta_output: path of the combined FASTA output
    :param chunk_key: datum key, with or without the ``$chunk.`` prefix
    :return: 0 on success (exit-code convention)
    """
    chunks = load_pipeline_chunks_from_json(chunk_json)

    # Allow looseness: accept keys supplied without the '$chunk.' prefix.
    if not chunk_key.startswith('$chunk.'):
        chunk_key = '$chunk.' + chunk_key
        # log.warn is a deprecated alias; use log.warning
        log.warning(
            "Prepending chunk key with '$chunk.' to '{c}'".format(c=chunk_key))

    fastx_files = get_datum_from_chunks_by_chunk_key(chunks, chunk_key)
    gather_fasta(fastx_files, fasta_output)
    return 0
def run_main(chunk_json, fofn_output, chunk_key):
    """Concatenate FOFN chunk files listed in a chunk JSON into one FOFN.

    Runs inside the output file's directory (via the ``cd`` context manager)
    so that relative paths in the chunk JSON resolve consistently.

    :param chunk_json: path to a pipeline-chunk JSON file
    :param fofn_output: path of the combined FOFN output
    :param chunk_key: datum key, with or without the ``$chunk.`` prefix
    """
    with cd(os.path.dirname(fofn_output)):
        chunks = load_pipeline_chunks_from_json(chunk_json)

        # Allow looseness: accept keys supplied without the '$chunk.' prefix.
        if not chunk_key.startswith('$chunk.'):
            chunk_key = '$chunk.' + chunk_key
            # log.warn is a deprecated alias; use log.warning
            log.warning("Prepending chunk key with '$chunk.' to '{c}'".format(c=chunk_key))

        fofn_files = get_datum_from_chunks_by_chunk_key(chunks, chunk_key)
        print("fofn_files:%s %s" % (repr(fofn_files), repr(fofn_output)))

        # Combine all into one.
        with open(fofn_output, 'w') as ofs:
            for fn in fofn_files:
                with open(fn) as ifs:
                    ofs.write(ifs.read())
def test_write_chunks(self):
    """Write a list of PipelineChunks to JSON and confirm they all load back."""

    def f(i):
        # Datum dict keyed with the required chunk-key prefix.
        return {"{c}movie_fofn_id".format(c=PipelineChunk.CHUNK_KEY_PREFIX):
                    "/path/to_movie-{i}.fofn".format(i=i),
                "{c}region_fofn_id".format(c=PipelineChunk.CHUNK_KEY_PREFIX):
                    "/path/rgn_{i}.fofn".format(i=i)}

    to_i = lambda i: "chunk-id-{i}".format(i=i)
    to_p = lambda i: PipelineChunk(to_i(i), **f(i))

    nchunks = 5
    # xrange does not exist on Python 3; use range (matches the rest of the file).
    pipeline_chunks = [to_p(i) for i in range(nchunks)]
    log.debug(pipeline_chunks)

    tmp_dir = get_temp_dir("pipeline-chunks")
    tmp_name = get_temp_file("_chunk.json", tmp_dir)
    write_pipeline_chunks(pipeline_chunks, tmp_name, "Example chunk file")

    pchunks = load_pipeline_chunks_from_json(tmp_name)
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(len(pchunks), nchunks)