Example #1
"""
A script to generate small test files by taking the first reads of every
'humic' pool and writing them to the corresponding 'test' pool.

Typical usage:
$ ./gen_test.py
"""

# Built-in modules #
import time, datetime

# Internal modules #
from gefes import projects

# Third party modules #
from shell_command import shell_output
import playdoh

# Timer #
now = time.time()

###############################################################################
print "Making test files"

# Do it #
pairs = []
pairs += [(projects['humic'][i].fwd_path, projects['test'][i].fwd_path) for i in range(3)]
pairs += [(projects['humic'][i].rev_path, projects['test'][i].rev_path) for i in range(3)]
process = lambda x: shell_output('zcat %s | head -n 4000 | gzip > %s' % (x[0], x[1]))

# Run it in parallel #
playdoh.map(process, pairs, cpu=len(pairs))

# Report Success #
run_time = datetime.timedelta(seconds=round(time.time()-now))
print "\033[0;32mRun time: '%s'\033[0m" % (run_time)
Example #2
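# (Fragment: the full script builds a Brian v1 network earlier on, defining
# the group P, the recorded populations Pe and Pi, and the connections Ce
# and Ci, all inside a function fun(tau) whose body continues below.)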
    # Record the number of spikes
    Me = PopulationSpikeCounter(Pe)
    Mi = PopulationSpikeCounter(Pi)
    
    net = Network(P, Ce, Ci, Me, Mi)
    
    net.run(1 * second)
    
    return Me.nspikes, Mi.nspikes

if __name__ == '__main__':
    taums = [5]*3
    
    import time
    t1 = time.clock()
    result = playdoh.map(fun, taums, cpu=3)
    d = time.clock()-t1
    
    print result
    print "simulation last %.2f seconds with playdoh and %d CPUs" % (d, len(taums))
    
    
    t1 = time.clock()
    result2 = []
    for i in xrange(len(taums)):
        t0 = time.clock()
        r = fun(taums[i])
        d0 = time.clock()-t0
        print "simulation %d last %.2f seconds" % (i, d0)
        result2.append(r)
    d2 = time.clock()-t1
    
    print "simulations lasted %.2f seconds serially" % d2
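
# Note: on Unix, time.clock() measures the CPU time of the calling process
# only, so it under-reports work done in playdoh's worker processes; the
# wall-clock time.time() is a fairer yardstick here. A minimal sketch of the
# same benchmark with the standard library, assuming fun is defined at top
# level so that multiprocessing can pickle it:
import time
import multiprocessing

if __name__ == '__main__':
    taums = [5] * 3

    t1 = time.time()
    workers = multiprocessing.Pool(processes=len(taums))
    parallel = workers.map(fun, taums)
    print "parallel: %.2f seconds" % (time.time() - t1)

    t1 = time.time()
    serial = [fun(tau) for tau in taums]
    print "serial: %.2f seconds" % (time.time() - t1)
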
Example #3
# Just one function for one pool #
pj = illumitag.projects['test']
p = pj[0]
p(steps=[{
    'make_pool_plots': {}
}], threads=False)
# Regenerate the early exit for one pool #
illumitag.projects['inga'].first.run_slurm([{
    'make_qiime_output': {}
}, {
    'make_mothur_output': {}
}])
# Just one graph for one pool #
illumitag.projects['evaluation'][0].load().graphs[-1].plot()
# A few pools #
pj = illumitag.projects['test']
[pool() for pool in pj.pools[1:]]
# One function for several pools in parallel #
import playdoh
playdoh.map(lambda p: p.pool_fastqc(),
            illumitag.projects['evaluation'].pools,
            cpu=5)
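# playdoh accepts the lambda above; the standard multiprocessing module cannot
# pickle lambdas, so an equivalent sketch needs a top-level helper instead
# (and assumes the pool objects themselves are picklable):
import multiprocessing

def run_fastqc(pool):
    return pool.pool_fastqc()

workers = multiprocessing.Pool(processes=5)
workers.map(run_fastqc, illumitag.projects['evaluation'].pools)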
# All pools via SLURM #
job_ids = [pool.run_slurm() for pool in illumitag.pools]
# And analyses via slurm #
ids = [proj.run_analysis_slurm() for proj in illumitag.projects]
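# The run_slurm calls return job identifiers. A generic way to block until
# those jobs have left the queue, independent of illumitag (a sketch that
# polls the standard squeue command):
import time, subprocess

def wait_for_jobs(job_ids, delay=60):
    """Return once none of the given jobs appear in squeue any more."""
    while True:
        try:
            out = subprocess.check_output(['squeue', '-h', '-j',
                                           ','.join(map(str, job_ids))])
        except subprocess.CalledProcessError:
            return  # the queue no longer knows these jobs
        if not out.strip():
            return
        time.sleep(delay)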

# One project #
pj = illumitag.projects['test']
pj.run_pools()
# One project via slurm #
pj = illumitag.projects['test']
pj.run_pools_slurm()
# Just one statistic for one project #
p = illumitag.projects['evaluation']
p.load()
[pl.good_barcodes.relative_std_dev for pl in p]
Example #4
# Just one pool #
pj = illumitag.projects['test']; p = pj[0]; p(threads=False)
# Just one pool via slurm #
pj = illumitag.projects['andrea']; p = pj[2]; p.run_slurm()
num = illumitag.projects['inga'].first.run_slurm()
# Just one function for one pool #
pj = illumitag.projects['test']; p = pj[0]; p(steps=[{'make_pool_plots':{}}], threads=False)
# Regenerate the early exit for one pool #
illumitag.projects['inga'].first.run_slurm([{'make_qiime_output':{}},{'make_mothur_output':{}}])
# Just one graph for one pool #
illumitag.projects['evaluation'][0].load().graphs[-1].plot()
# A few pools #
pj = illumitag.projects['test']; [pool() for pool in pj.pools[1:]]
# One function for several pools in parallel #
import playdoh; playdoh.map(lambda p: p.pool_fastqc(), illumitag.projects['evaluation'].pools, cpu=5)
# All pools via SLURM #
job_ids = [pool.run_slurm() for pool in illumitag.pools]
# And analyses via slurm #
ids = [proj.run_analysis_slurm() for proj in illumitag.projects]

# One project #
pj = illumitag.projects['test']; pj.run_pools()
# One project via slurm #
pj = illumitag.projects['test']; pj.run_pools_slurm()
# Just one statistic for one project #
p = illumitag.projects['evaluation']; p.load(); [pl.good_barcodes.relative_std_dev for pl in p]
# Just one graph for one project #
p = illumitag.projects['evaluation']; p.load(); [illumitag.graphs.pool_plots.AssemblyCounts(pl).plot() for pl in p]
pj = illumitag.projects['evaluation']; pj.load(); pj.graphs[-1].plot()
# Just one function for one project #