def run(self):
    '''Run all replicates across a pool of worker processes.

    Spawns max(min(jobs, replicates), 1) workers that consume replicate
    indices from a shared work queue (one None sentinel per worker marks
    the end), collects one result per replicate off the result queue, and
    folds each into self.result via self.__save.

    Returns an empty dict when no results were accumulated, otherwise a
    dict merging self.data and self.result (self.result keys win on
    collision, since its items come last).
    '''
    if self.data['verbosity'] <= 1:
        iterations = range(self.data['replicates'])
    else:
        widgets = ['{0} : '.format(self.data['name']), Percentage(), ' ',
                   Bar('='), ' ', ETA()]
        pbar = ProgressBar(widgets=widgets,
                           maxval=self.data['replicates'],
                           term_width=get_terminal_size()[0] - 5)
        # wrapping the range lets the bar advance once per collected result
        # (the extra generator expression around range() was redundant)
        iterations = pbar(range(self.data['replicates']))
    nJobs = max(min(self.data['jobs'], self.data['replicates']), 1)
    workQueue = Queue()
    resQueue = Queue()
    # put all replicates + one stop signal (None) per worker in the queue
    for replicate in range(self.data['replicates']):
        workQueue.put(replicate)
    for _ in range(nJobs):
        workQueue.put(None)
    # spawn workers
    procs = [Process(target=self.calculate, args=(workQueue, resQueue))
             for _ in range(nJobs)]
    for p in procs:
        p.start()
    # collect exactly one result per replicate off the queue
    for _ in iterations:
        try:
            self.__save(resQueue.get())
        except KeyboardInterrupt as e:
            # FIX: chain the original interrupt so the traceback keeps the
            # cause instead of silently replacing it
            raise ValueError("calculator terminated!") from e
    for p in procs:
        p.join()
    if self.failure_count.value():
        # NOTE(review): failure_count.value() is called as a method, so this
        # looks like a project counter wrapper, not multiprocessing.Value --
        # confirm before touching
        env.logger.info("{} invalid replicate(s)".format(
            self.failure_count.value()))
        self.data['replicates'] = self.data['replicates'] - \
            self.failure_count.value()
    return {} if len(self.result) == 0 else dict(
        list(self.data.items()) + list(self.result.items()))
def __init__(self, args, unknown_args):
    '''Set up logging, open the input data file, and prepare output state.

    Parameters:
        args: parsed command-line namespace (verbosity, data, output,
              limit, append, missing_unlisted, replicates, cmd, ...)
        unknown_args: pass-through arguments not consumed by the parser
    '''
    self.args = args
    self.unknown_args = unknown_args
    self.option = checkInput(args)
    #
    env.logger = getLogger(
        max(min(args.verbosity - 1, 2), 0),
        fn=os.path.splitext(args.output[0])[0],
        # FIX: 'is not 0' tests identity against an int literal -- a
        # SyntaxWarning on Python >= 3.8 and interning-dependent; use '!='
        fv=2 if args.verbosity != 0 else 0)
    env.logger.debug('\n{0}\n{1}\n{0}'.format(
        "=" * min(len(args.cmd), 100), args.cmd))
    # at verbosity 1 messages go to the console printer instead of the log
    self.logger = env.logger.info if args.verbosity != 1 else printinfo
    #
    self.logger('Loading data from [{}] ...'.format(args.data))
    if self.option == 1:
        self.file = SFSFile(args.data)
    else:
        self.file = GFile(args.data)
    self.groups = self.file.getnames()
    self.logger('{:,d} units found'.format(len(self.groups)))
    # load non-missing data to annotate each variant position with whether
    # it is missing from association analysis; named chip_file because it
    # mimics the behavior of exome chip design
    if args.missing_unlisted:
        self.chip_file = SFSFile(args.missing_unlisted)
    else:
        self.chip_file = None
    # cap the number of units to analyze (clamped to [1, len(groups)])
    if self.args.limit:
        self.limit = min(max(1, args.limit), len(self.groups))
        self.logger('{:,d} units will be analyzed'.format(self.limit))
    else:
        self.limit = len(self.groups)
    self.result = ResultManager(args.output,
                                action='w' if not args.append else 'a')
    if self.args.verbosity == 1:
        widgets = [
            FormatLabel('scanning: unit %(value)d - '),
            Percentage(), ' ',
            Bar('>'), ' ',
            ETA()
        ]
        self.pbar = ProgressBar(widgets=widgets,
                                maxval=self.limit,
                                term_width=get_terminal_size()[0] - 5).start()
    else:
        # use each group's own progress bar, or no progress bar at all
        self.pbar = ProgressBarNull()
    # buffer holding every input dict in a list; only kept when
    # replicates < 0, otherwise disabled
    self.data_buffer = [] if self.args.replicates < 0 else None
def run(self):
    '''Run all replicates across a pool of worker processes.

    Spawns max(min(jobs, replicates), 1) workers that consume replicate
    indices from a shared work queue (one None sentinel per worker marks
    the end), collects one result per replicate off the result queue, and
    folds each into self.result via self.__save.

    Returns an empty dict when no results were accumulated, otherwise a
    dict merging self.data and self.result (self.result keys win on
    collision, since its items come last).
    '''
    if self.data['verbosity'] <= 1:
        iterations = range(self.data['replicates'])
    else:
        widgets = [
            '{0} : '.format(self.data['name']),
            Percentage(), ' ',
            Bar('='), ' ',
            ETA()
        ]
        pbar = ProgressBar(widgets=widgets,
                           maxval=self.data['replicates'],
                           term_width=get_terminal_size()[0] - 5)
        # wrapping the range lets the bar advance once per collected result
        # (the extra generator expression around range() was redundant)
        iterations = pbar(range(self.data['replicates']))
    nJobs = max(min(self.data['jobs'], self.data['replicates']), 1)
    workQueue = Queue()
    resQueue = Queue()
    # put all replicates + one stop signal (None) per worker in the queue
    for replicate in range(self.data['replicates']):
        workQueue.put(replicate)
    for _ in range(nJobs):
        workQueue.put(None)
    # spawn workers
    procs = [
        Process(target=self.calculate, args=(workQueue, resQueue))
        for _ in range(nJobs)
    ]
    for p in procs:
        p.start()
    # collect exactly one result per replicate off the queue
    for _ in iterations:
        try:
            self.__save(resQueue.get())
        except KeyboardInterrupt as e:
            # FIX: chain the original interrupt so the traceback keeps the
            # cause instead of silently replacing it
            raise ValueError("calculator terminated!") from e
    for p in procs:
        p.join()
    if self.failure_count.value():
        # NOTE(review): failure_count.value() is called as a method, so this
        # looks like a project counter wrapper, not multiprocessing.Value --
        # confirm before touching
        env.logger.info("{} invalid replicate(s)".format(
            self.failure_count.value()))
        self.data['replicates'] = self.data[
            'replicates'] - self.failure_count.value()
    return {} if len(self.result) == 0 else dict(
        list(self.data.items()) + list(self.result.items()))
def __init__(self, args, unknown_args):
    '''Set up logging, open the input data file, and prepare output state.

    Parameters:
        args: parsed command-line namespace (verbosity, data, output,
              limit, append, missing_unlisted, replicates, cmd, ...)
        unknown_args: pass-through arguments not consumed by the parser
    '''
    self.args = args
    self.unknown_args = unknown_args
    self.option = checkInput(args)
    #
    env.logger = getLogger(
        max(min(args.verbosity - 1, 2), 0),
        fn=os.path.splitext(args.output[0])[0],
        # FIX: 'is not 0' tests identity against an int literal -- a
        # SyntaxWarning on Python >= 3.8 and interning-dependent; use '!='
        fv=2 if args.verbosity != 0 else 0,
    )
    env.logger.debug("\n{0}\n{1}\n{0}".format("=" * min(len(args.cmd), 100),
                                              args.cmd))
    # at verbosity 1 messages go to the console printer instead of the log
    self.logger = env.logger.info if args.verbosity != 1 else printinfo
    #
    self.logger("Loading data from [{}] ...".format(args.data))
    if self.option == 1:
        self.file = SFSFile(args.data)
    else:
        self.file = GFile(args.data)
    self.groups = self.file.getnames()
    self.logger("{:,d} units found".format(len(self.groups)))
    # load non-missing data to annotate each variant position with whether
    # it is missing from association analysis; named chip_file because it
    # mimics the behavior of exome chip design
    if args.missing_unlisted:
        self.chip_file = SFSFile(args.missing_unlisted)
    else:
        self.chip_file = None
    # cap the number of units to analyze (clamped to [1, len(groups)])
    if self.args.limit:
        self.limit = min(max(1, args.limit), len(self.groups))
        self.logger("{:,d} units will be analyzed".format(self.limit))
    else:
        self.limit = len(self.groups)
    self.result = ResultManager(args.output,
                                action="w" if not args.append else "a")
    if self.args.verbosity == 1:
        widgets = [
            FormatLabel("scanning: unit %(value)d - "),
            Percentage(), " ",
            Bar(">"), " ",
            ETA()
        ]
        self.pbar = ProgressBar(widgets=widgets,
                                maxval=self.limit,
                                term_width=get_terminal_size()[0] - 5).start()
    else:
        # use each group's own progress bar, or no progress bar at all
        self.pbar = ProgressBarNull()
    # buffer holding every input dict in a list; only kept when
    # replicates < 0, otherwise disabled
    self.data_buffer = [] if self.args.replicates < 0 else None