def test_get_denoiser_data_dir(self):
    """The denoiser data dir exists and contains the FLX error profile."""
    data_dir = get_denoiser_data_dir()
    # Directory returned by the helper must be present on disk;
    # the path is expected to carry a trailing separator, so plain
    # concatenation below yields a valid file path.
    self.assertTrue(exists(data_dir))
    self.assertTrue(exists(data_dir + 'FLX_error_profile.dat'))
from os import makedirs, remove from os.path import exists from qiime.util import get_tmp_filename from cogent.util.misc import create_dir from qiime.util import parse_command_line_parameters, get_options_lookup,\ make_option from qiime.denoiser.preprocess import STANDARD_BACTERIAL_PRIMER from qiime.denoiser.flowgram_clustering import denoise_seqs, denoise_per_sample from qiime.denoiser.utils import files_exist, get_denoiser_data_dir,\ cat_sff_files options_lookup = get_options_lookup() DENOISER_DATA_DIR = get_denoiser_data_dir() # denoiser.py script_info = {} script_info['brief_description'] = """Remove noise from 454 sequencing data""" script_info[ 'script_description'] = """The denoiser removes sequencing noise characteristic to pyrosequencing by flowgram clustering. For a detailed explanation of the underlying algorithm see (Reeder and Knight, Nature Methods 7(9), 2010).""" script_info['script_usage'] = [ ("", """Run denoiser on flowgrams in 454Reads.sff.txt with read-to-barcode mapping in seqs.fna, put results into Outdir, log progress in Outdir/denoiser.log""", """%prog -i 454Reads.sff.txt -f seqs.fna -v -o Outdir"""), ("Multiple sff.txt files", """Run denoiser on two flowgram files in 454Reads_1.sff.txt and 454Reads_2.sff.txt
), make_option("-p", "--port", action="store", type="int", dest="port", help="Server port " + "[REQUIRED]"), make_option( "-s", "--server_address", action="store", type="string", dest="server", help="Server address" + "[REQUIRED]" ), ] script_info["optional_options"] = [ make_option( "-e", "--error_profile", action="store", type="string", dest="error_profile", help="Path to error profile" + " [DEFAULT: %default]", default=get_denoiser_data_dir() + "FLX_error_profile.dat", ), make_option( "-c", "--counter", action="store", type="int", dest="counter", help="Round counter to start this worker with " + " [default: %default]", default=0, ), ] script_info["version"] = __version__
make_option('-p','--port', action='store',\ type='int', dest='port', help='Server port '+\ '[REQUIRED]'), make_option('-s','--server_address',action='store',\ type='string',dest='server',help='Server address'+\ '[REQUIRED]' ) ] script_info['optional_options']=[ \ make_option('-e','--error_profile',action='store',\ type='string',dest='error_profile',\ help='Path to error profile'+\ ' [DEFAULT: %default]',\ default = get_denoiser_data_dir()+\ 'FLX_error_profile.dat'), make_option('-c','--counter', action='store',\ type='int', dest='counter',\ help='Round counter to start this worker with '+\ ' [default: %default]', default=0) ] script_info['version'] = __version__ def main(commandline_args=None): parser, opts, args = parse_command_line_parameters(**script_info) if not opts.file_path:
make_option('-s', '--server_address', action='store', type='string', dest='server', help='Server address' + '[REQUIRED]') ] script_info['optional_options'] = [ make_option('-e', '--error_profile', action='store', type='string', dest='error_profile', help='Path to error profile' + ' [DEFAULT: %default]', default=get_denoiser_data_dir() + 'FLX_error_profile.dat'), make_option('-c', '--counter', action='store', type='int', dest='counter', help='Round counter to start this worker with ' + ' [default: %default]', default=0) ] script_info['version'] = __version__ def main(commandline_args=None): parser, opts, args = parse_command_line_parameters(**script_info)
from qiime.denoiser.utils import init_flowgram_file, append_to_flowgram_file,\ FlowgramContainerFile, FlowgramContainerArray, make_stats, store_mapping,\ store_clusters, read_denoiser_mapping, check_flowgram_ali_exe,\ sort_seqs_by_clustersize, get_denoiser_data_dir, get_flowgram_ali_exe,\ write_checkpoint, read_checkpoint, sort_mapping_by_size from qiime.denoiser.cluster_utils import setup_cluster, adjust_workers,\ stop_workers, check_workers, ClientHandler,\ save_send, send_flowgram_to_socket from qiime.denoiser.utils import write_sff_header from qiime.denoiser.flowgram_filter import split_sff from qiime.denoiser.preprocess import preprocess, preprocess_on_cluster,\ read_preprocessed_data DENOISER_DATA_DIR = get_denoiser_data_dir() def compute_workload(num_cores, num_flows, spread): """Compute workload for each individual worker num_flows: total number of flows to be processed num_cores: total number of workers available for processing the flows spread: relative performance of the each worker, with 1.0 being nominal processing rate """ # sigma is the sum of the normalized processing velocity scores # for each cluster processor. In a perfect world, sigma == num_cores # and the normalized processing velocity (in spread) == 1.0
make_option('-p', '--port', action='store', type='int', dest='port', help='Server port ' + '[REQUIRED]'), make_option('-s', '--server_address', action='store', type='string', dest='server', help='Server address' + '[REQUIRED]') ] script_info['optional_options'] = [ make_option('-e', '--error_profile', action='store', type='string', dest='error_profile', help='Path to error profile' + ' [DEFAULT: %default]', default=get_denoiser_data_dir() + 'FLX_error_profile.dat'), make_option('-c', '--counter', action='store', type='int', dest='counter', help='Round counter to start this worker with ' + ' [default: %default]', default=0) ] script_info['version'] = __version__ def main(commandline_args=None): parser, opts, args = parse_command_line_parameters(**script_info) if not opts.file_path: