Example #1
 def test_chunking_by_multiprocessing(self):
     """[fops] chunk by multiprocessing"""
     input = 'test-data/test.read1.fa'
     f_type, delim = fops.file_type(input)
     chunks = fops.get_chunks(input, delim, split_type='pieces')
     tf = fops.make_chunks(chunks, mp=True)
     # splitting by 'pieces' should yield one chunk per worker (cpu_count() - 1)
     assert len(tf) == multiprocessing.cpu_count() - 1
     self.clean(tf)
Example #2
 def test_chunking_by_size(self):
     """[fops] chunk by size"""
     input = 'test-data/test.read1.fa'
     f_type, delim = fops.file_type(input)
     chunks = fops.get_chunks(input, delim, mb=0.1, split_type='size')
     tf = fops.make_chunks(chunks, mp=False)
     # make sure chunks are ~ mb in size.  the problem is that the last one will
     # almost always be off, so just round up and check that it is within 1 MB.
     for f in tf:
         sz = os.path.getsize(f)/1024.**2
         self.assertAlmostEqual(math.ceil(sz), 0.1, delta=1)
     self.clean(tf)
Example #3
 def test_fastq_file_type(self):
     """[fops] fastq file type and delimiter check"""
     input = 'test-data/test.read1.fq'
     ft, delim = fops.file_type(input)
     assert [ft, delim] == ['fastq','@']
Example #4
 def get_chunks(self, input):
     # determine the record delimiter, split the input into pieces, and
     # return the resulting chunk file paths
     f_type, delim = fops.file_type(input)
     chunks = fops.get_chunks(input, delim, split_type='pieces')
     values = fops.make_chunks(chunks, mp=False)
     return values
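Read together, the examples cover the whole fops workflow: file_type() detects the format and record delimiter, get_chunks() builds a chunk plan (by 'pieces' or by 'size' in MB), and make_chunks() writes the chunk files and returns their paths. The sketch below composes those calls outside the test class; it is a minimal sketch, the helper name split_into_pieces is made up for illustration, it assumes make_chunks() returns paths to temporary files (as the size checks in Example #2 suggest), and the os.remove() cleanup is an assumed stand-in for the test suite's self.clean() helper.

 import multiprocessing
 import os

 import fops  # the module exercised by the tests above


 def split_into_pieces(path):
     # detect the file format and record delimiter, e.g. ('fastq', '@') per Example #3
     f_type, delim = fops.file_type(path)
     # plan one piece per worker; with mp=True the split runs via multiprocessing
     # and, per the assertion in Example #1, yields cpu_count() - 1 chunk files
     chunks = fops.get_chunks(path, delim, split_type='pieces')
     return fops.make_chunks(chunks, mp=True)


 if __name__ == '__main__':
     chunk_files = split_into_pieces('test-data/test.read1.fa')
     print('wrote %d chunks' % len(chunk_files))
     # the tests clean up with self.clean(tf); plain os.remove() is an assumed stand-in
     for f in chunk_files:
         os.remove(f)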