Beispiel #1
0
def run(args):
    """
    Main process that:

     * Instantiates processing context,
     * Loads previous program instance,
     * Parallelizes file processing with threads pools,
     * Apply command-line action to the whole DRS tree,
     * Evaluate exit status.

    :param ArgumentParser args: The command-line arguments parser

    """
    # Instantiate processing context
    with ProcessingContext(args) as ctx:
        logging.info('==> Scan started')
        if not ctx.scan:
            # Reload state cached by a previous run instead of rescanning.
            reader = load(TREE_FILE)
            # First record holds the stored CONTROLLED_ARGS mapping -- skipped here.
            _ = next(reader)
            ctx.tree = next(reader)
            ctx.scan_err_log = next(reader)
            results = next(reader)
            # Rollback --commands_file value to command-line argument in any case
            ctx.tree.commands_file = ctx.commands_file
            msg = 'Skipping incoming files scan (use "--rescan" to force it) -- ' \
                  'Using cached DRS tree from {}'.format(TREE_FILE)
            if ctx.pbar:
                print(msg)
            logging.warning(msg)
        else:
            # Scan sources, in parallel through the context's pool when enabled.
            if ctx.use_pool:
                processes = ctx.pool.imap(process, ctx.sources)
            else:
                # Built-in map is lazy on Python 3 (itertools.imap was removed).
                processes = map(process, ctx.sources)
            # Process supplied files (materialize to count results below)
            results = list(processes)
        # Close progress bar
        if ctx.pbar:
            ctx.pbar.close()
        # Get number of files scanned (including skipped files)
        ctx.scan_files = len(results)
        # Get number of scan errors (workers return None on failure)
        ctx.scan_errors = results.count(None)
        # Backup tree context for later usage with other command lines
        store(TREE_FILE, data=[{key: ctx.__getattribute__(key) for key in CONTROLLED_ARGS},
                               ctx.tree,
                               ctx.scan_err_log,
                               results])
        logging.warning('DRS tree recorded for next usage onto {}.'.format(TREE_FILE))
        # Evaluates the scan results to trigger the DRS tree action
        if evaluate(results):
            # Check upgrade uniqueness
            ctx.tree.check_uniqueness(ctx.checksum_type)
            # Apply tree action
            ctx.tree.get_display_lengths()
            getattr(ctx.tree, ctx.action)()
Beispiel #2
0
def run(args):
    """
    Main process that:

     * Instantiates processing context,
     * Parallelizes file processing with threads pools,
     * Copies mapfile(s) to the output directory,
     * Evaluate exit status.

    :param ArgumentParser args: Command-line arguments parser

    """
    # Instantiate processing context
    with ProcessingContext(args) as ctx:
        # If dataset ID is submitted for "show" action skip scan
        logging.info('==> Scan started')
        if ctx.use_pool:
            processes = ctx.pool.imap(process, ctx.sources)
        else:
            # Built-in map is lazy on Python 3 (itertools.imap was removed).
            processes = map(process, ctx.sources)
        # Process supplied sources (materialize to count results below)
        results = list(processes)
        # Close progress bar
        if ctx.pbar:
            ctx.pbar.close()
        # Get number of files scanned (including skipped files)
        ctx.scan_files = len(results)
        # Get number of scan errors (workers return None on failure)
        ctx.scan_errors = results.count(None)
        # Deduplicate results and drop None entries to get the generated mapfiles.
        # (len(filter(...)) raises TypeError on Python 3: filter is an iterator.)
        mapfiles = [r for r in set(results) if r]
        ctx.nb_map = len(mapfiles)
        # Evaluates the scan results to finalize mapfiles writing
        if evaluate(results):
            for mapfile in mapfiles:
                # Remove mapfile working extension (compute the final path once)
                final_path = remove(WORKING_EXTENSION, mapfile)
                if ctx.action == 'show':
                    # Print mapfiles to be generated
                    if ctx.pbar or ctx.quiet:
                        print(final_path)
                    logging.info(final_path)
                elif ctx.action == 'make':
                    # A final mapfile is silently overwritten if already exists
                    os.rename(mapfile, final_path)
Beispiel #3
0
def run(args):
    """
    Main process that:

     * Instantiates processing context,
     * Loads previous program instance,
     * Parallelizes file processing with threads pools,
     * Apply command-line action to the whole DRS tree,
     * Evaluate exit status.

    :param ArgumentParser args: The command-line arguments parser

    """
    # Instantiate processing context
    with ProcessingContext(args) as ctx:
        # Init global variable (tree is shared with worker helpers)
        global tree
        # Init DRS tree
        tree = DRSTree(ctx.root, ctx.version, ctx.mode, ctx.commands_file)
        # Init process context passed to each worker via the initializer
        cctx = {name: getattr(ctx, name) for name in PROCESS_VARS}
        # Disable file scan if a previous DRS tree have generated using same context and no "list" action
        if do_scanning(ctx):
            # Explicit sentinel instead of probing locals() for the pool.
            pool = None
            if ctx.use_pool:
                # Init processes pool (keys/values as lists so initargs pickle)
                pool = Pool(processes=ctx.processes,
                            initializer=initializer,
                            initargs=(list(cctx.keys()), list(cctx.values())))
                processes = pool.imap(process, ctx.sources)
            else:
                initializer(list(cctx.keys()), list(cctx.values()))
                processes = map(process, ctx.sources)
            # Process supplied sources
            handlers = list(processes)
            # Close pool of workers if exists
            if pool is not None:
                pool.close()
                pool.join()
            Print.progress('\n')
            # Build DRS tree (reset shared progress counter for the second pass)
            cctx['progress'].value = 0
            initializer(list(cctx.keys()), list(cctx.values()))
            handlers = [h for h in handlers if h is not None]
            results = list(map(tree_builder, handlers))
            Print.progress('\n')
        else:
            reader = load(TREE_FILE)
            msg = 'Skip incoming files scan (use "--rescan" to force it) -- '
            msg += 'Using cached DRS tree from {}'.format(TREE_FILE)
            Print.warning(msg)
            # First record holds the stored CONTROLLED_ARGS mapping -- skipped here.
            _ = next(reader)
            tree = next(reader)
            handlers = next(reader)
            results = next(reader)
        # Flush buffer
        Print.flush()
        # Rollback --commands-file value to command-line argument in any case
        tree.commands_file = ctx.commands_file
        # Get number of files scanned (including errors/skipped files)
        ctx.scan_data = len(results)
        # Get number of scan errors (workers return None on failure)
        ctx.scan_errors = results.count(None)
        # Backup tree context for later usage with other command lines
        store(
            TREE_FILE,
            data=[{key: ctx.__getattribute__(key)
                   for key in CONTROLLED_ARGS}, tree, handlers, results])
        Print.info(TAGS.INFO +
                   'DRS tree recorded for next usage onto {}.'.format(
                       COLORS.HEADER(TREE_FILE)))
        # Evaluates the scan results to trigger the DRS tree action
        if evaluate(results):
            # Check upgrade uniqueness
            tree.check_uniqueness()
            # Apply tree action
            tree.get_display_lengths()
            getattr(tree, ctx.action)()
    # Evaluate errors and exit with appropriated return code
    if ctx.scan_errors > 0:
        sys.exit(ctx.scan_errors)
Beispiel #4
0
def run(args):
    """
    Main process that:

     * Instantiates processing context,
     * Parallelizes file processing with threads pools,
     * Copies mapfile(s) to the output directory,
     * Evaluate exit status.

    :param ArgumentParser args: Command-line arguments parser

    """

    # Deal with 'quiet' option separately. If set, turn off all output
    # before creating ProcessingContext, and turn it on only when needed
    quiet = getattr(args, 'quiet', False)
    if quiet:
        output_control = OutputControl()
        output_control.stdout_off()

    # Instantiate processing context
    with ProcessingContext(args) as ctx:
        # Init process context passed to each worker via the initializer
        cctx = {name: getattr(ctx, name) for name in PROCESS_VARS}
        # Explicit sentinel instead of probing locals() for the pool.
        pool = None
        if ctx.use_pool:
            # Init processes pool (keys/values as lists so initargs pickle;
            # Python 3 dict views are not picklable)
            pool = Pool(processes=ctx.processes,
                        initializer=initializer,
                        initargs=(list(cctx.keys()), list(cctx.values())))
            processes = pool.imap(process, ctx.sources)
        else:
            initializer(list(cctx.keys()), list(cctx.values()))
            # Built-in map is lazy on Python 3 (itertools.imap was removed).
            processes = map(process, ctx.sources)
        # Process supplied sources (materialize to count results below)
        results = list(processes)
        # Close pool of workers if exists
        if pool is not None:
            pool.close()
            pool.join()
        Print.progress('\n')
        # Flush buffer
        Print.flush()
        # Get number of files scanned (excluding errors/skipped files)
        # (len(filter(...)) raises TypeError on Python 3: filter is an iterator.)
        ctx.scan_data = len([r for r in results if r])
        # Get number of scan errors (workers return None on failure)
        ctx.scan_errors = results.count(None)
        # Get number of generated mapfiles (deduplicated, None dropped)
        mapfiles = [r for r in set(results) if r]
        ctx.nbmap = len(mapfiles)
        # Evaluates the scan results to finalize mapfiles writing
        if evaluate(results):
            for mapfile in mapfiles:
                # Remove mapfile working extension
                if ctx.action == 'show':
                    # Print mapfiles to be generated
                    result = remove(WORKING_EXTENSION, mapfile)
                    if quiet:
                        # Temporarily re-enable stdout just for this line
                        output_control.stdout_on()
                        print(result)
                        output_control.stdout_off()
                    else:
                        Print.result(result)
                elif ctx.action == 'make':
                    # A final mapfile is silently overwritten if already exists
                    os.rename(mapfile, remove(WORKING_EXTENSION, mapfile))
    # Evaluate errors and exit with appropriated return code
    if ctx.scan_errors > 0:
        sys.exit(ctx.scan_errors)