        if in_args:
            bowtie_args += argument + " "
        if argument == "--":
            argv = sys.argv[: i + 1]
            in_args = True
    """Now collect other arguments. While the variable args declared below is
    global, properties of args are also arguments of the go() function so
    different command-line arguments can be passed to it for unit tests."""
    args = parser.parse_args(argv[1:])
    # Start keep_alive thread immediately
    if args.keep_alive:
        from dooplicity.tools import KeepAlive
        keep_alive_thread = KeepAlive(sys.stderr)
        keep_alive_thread.start()

if __name__ == "__main__":
    import time
    start_time = time.time()
    go(
        bowtie_exe=os.path.expandvars(args.bowtie_exe),
        bowtie_index_base=os.path.expandvars(args.bowtie_idx),
        bowtie_args=bowtie_args,
        gzip_level=args.gzip_level,
        verbose=args.verbose,
        report_multiplier=args.report_multiplier,
        scratch=tempdel.silentexpandvars(args.scratch),
    )
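    # As the docstring above notes, go()'s keyword arguments mirror the
    # argparse properties, so a unit test can skip the command line and call
    # go() directly. A minimal sketch, with hypothetical paths and values:
    #
    #     go(bowtie_exe="/usr/bin/bowtie",
    #        bowtie_index_base="/tmp/test_index/genome",
    #        bowtie_args="", gzip_level=3, verbose=True,
    #        report_multiplier=1.2, scratch=None)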
    '--type', type=int, required=False, default=1,
    help='If 1, assume final fields are ints. If 2, assume final fields '
         'are floats. Otherwise, assume final fields are strings.'
)
parser.add_argument(
    '--keep-alive', action='store_const', const=True, default=False,
    help='Periodically print Hadoop status messages to stderr to keep '
         'job alive'
)
args = parser.parse_args()

if args.keep_alive:
    from dooplicity.tools import KeepAlive
    keep_alive_thread = KeepAlive(sys.stderr)

input_line_count, output_line_count = 0, 0
# Must consume a line of stdin before outputting status messages
line = sys.stdin.readline()
if args.keep_alive:
    keep_alive_thread.start()

if args.type == 1:
    last_key, totals, write_line = None, [0] * args.value_count, False
    while True:
        if not line:
            if last_key is None:
                # Input is empty
                break
            else:
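                # Input is exhausted but a key is still pending: the code
                # below presumably flushes the totals accumulated for
                # last_key before terminating.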
    for i, argument in enumerate(sys.argv[1:]):
        if in_args:
            bowtie2_args += argument + ' '
        if argument == '--':
            argv = sys.argv[:i + 1]
            in_args = True
    '''Now collect other arguments. While the variable args declared below is
    global, properties of args are also arguments of the go() function so
    different command-line arguments can be passed to it for unit tests.'''
    args = parser.parse_args(argv[1:])
    mover = filemover.FileMover(args=args)
    # Start keep_alive thread immediately
    if args.keep_alive:
        from dooplicity.tools import KeepAlive
        keep_alive_thread = KeepAlive(sys.stderr)
        keep_alive_thread.start()

if __name__ == '__main__' and not args.test:
    import time
    start_time = time.time()
    go(bowtie2_exe=os.path.expandvars(args.bowtie2_exe),
       bowtie2_index_base=os.path.expandvars(args.bowtie2_idx),
       bowtie2_args=bowtie2_args,
       verbose=args.verbose,
       report_multiplier=args.report_multiplier,
       stranded=args.stranded,
       fudge=args.fudge,
       score_min=args.score_min,
       mover=mover,
       intermediate_dir=args.intermediate_dir,
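       # bowtie2_args was assembled above from everything following the lone
       # '--' token on the command line; e.g., a hypothetical invocation like
       #     python step.py --bowtie2-idx genome --keep-alive -- --local -p 4
       # would pass '--local -p 4' straight through to Bowtie 2 here, while
       # the arguments before '--' go to argparse.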