def run(args):
    """
    Entry point of the scan.

     * Builds the processing context,
     * Distributes source processing over a thread pool (or serially),
     * Finalizes the generated mapfile(s),
     * Records the counters used to evaluate the exit status.

    :param ArgumentParser args: Command-line arguments parser

    """
    with ProcessingContext(args) as ctx:
        # If a dataset ID is submitted for the "show" action the scan is skipped
        logging.info('==> Scan started')
        # Map the worker over the supplied sources, in parallel when a pool exists
        if ctx.use_pool:
            outputs = ctx.pool.imap(process, ctx.sources)
        else:
            outputs = itertools.imap(process, ctx.sources)
        # Materialize every result (a None entry marks a scan error)
        results = list(outputs)
        # The progress bar is no longer needed once the scan is done
        if ctx.pbar:
            ctx.pbar.close()
        # Counters feeding the exit-status evaluation
        ctx.scan_files = len(results)          # scanned files, skipped included
        ctx.scan_errors = results.count(None)  # failed scans
        mapfiles = filter(None, set(results))  # distinct mapfiles produced
        ctx.nb_map = len(mapfiles)
        # Finalize mapfile writing only when the scan is judged successful
        if evaluate(results):
            for mapfile in mapfiles:
                # Drop the working extension to obtain the final mapfile name
                final_name = remove(WORKING_EXTENSION, mapfile)
                if ctx.action == 'show':
                    # Display the mapfiles that would be generated
                    if ctx.pbar or ctx.quiet:
                        print(final_name)
                    logging.info(final_name)
                elif ctx.action == 'make':
                    # A final mapfile silently overwrites any pre-existing one
                    os.rename(mapfile, final_name)
def __iter__(self):
    """
    Yields datasets to process from a text file. Each line may contain
    the dataset with optional appended ``.v<version>`` or ``#<version>``,
    and only the part without the version is returned.

    :returns: The dataset ID without the version
    :rtype: *iter*

    """
    # Raw string: "\." and "\s" are invalid escape sequences in a normal
    # string literal (deprecated in Python 3.6+, SyntaxError from 3.12).
    version_pattern = r'((\.v|#)[0-9]+)?\s*$'
    for source in self.sources:
        with open(source) as f:
            for line in f:
                # Strip the optional version suffix together with trailing
                # whitespace (including the newline) before dispatching.
                # NOTE(review): blank lines yield an empty dataset ID to
                # self.attach — presumably filtered downstream; verify.
                yield self.attach(remove(version_pattern, line))
def run(args):
    """
    Main process that:

     * Instantiates processing context,
     * Parallelizes file processing with threads pools,
     * Copies mapfile(s) to the output directory,
     * Evaluate exit status.

    :param ArgumentParser args: Command-line arguments parser

    """
    # Deal with 'quiet' option separately. If set, turn off all output
    # before creating ProcessingContext, and turn it on only when needed
    quiet = getattr(args, 'quiet', False)
    if quiet:
        output_control = OutputControl()
        output_control.stdout_off()
    # Instantiate processing context
    with ProcessingContext(args) as ctx:
        # Init process context
        cctx = {name: getattr(ctx, name) for name in PROCESS_VARS}
        # Explicit handle on the workers pool (None when running serially);
        # replaces the fragile `'pool' in locals()` probing.
        pool = None
        # Init progress bar
        if ctx.use_pool:
            # Init processes pool
            pool = Pool(processes=ctx.processes,
                        initializer=initializer,
                        initargs=(cctx.keys(), cctx.values()))
            processes = pool.imap(process, ctx.sources)
        else:
            initializer(cctx.keys(), cctx.values())
            processes = itertools.imap(process, ctx.sources)
        # Process supplied sources (a None entry marks a scan error)
        results = [x for x in processes]
        # Close pool of workers if exists
        if pool is not None:
            pool.close()
            pool.join()
        Print.progress('\n')
        # Flush buffer
        Print.flush()
        # Get number of files scanned (excluding errors/skipped files)
        ctx.scan_data = len(filter(None, results))
        # Get number of scan errors
        ctx.scan_errors = results.count(None)
        # Get number of generated mapfiles
        ctx.nbmap = len(filter(None, set(results)))
        # Evaluates the scan results to finalize mapfiles writing
        if evaluate(results):
            for mapfile in filter(None, set(results)):
                # Remove mapfile working extension
                if ctx.action == 'show':
                    # Print mapfiles to be generated
                    result = remove(WORKING_EXTENSION, mapfile)
                    if quiet:
                        # Briefly re-enable stdout so the result is visible
                        output_control.stdout_on()
                        print(result)
                        output_control.stdout_off()
                    else:
                        Print.result(result)
                elif ctx.action == 'make':
                    # A final mapfile is silently overwritten if already exists
                    os.rename(mapfile, remove(WORKING_EXTENSION, mapfile))
    # Evaluate errors and exit with appropriate return code
    # (comment was previously split mid-sentence, stranding "return code"
    # as broken syntax)
    if ctx.scan_errors > 0:
        sys.exit(ctx.scan_errors)