Example #1
            def log_expr_val(expr_val, reused):
                # Consider that a PrebuiltOperator reuses values instead of
                # actually computing them.
                if isinstance(expr_val.expr.op, engine.PrebuiltOperator):
                    reused = True

                if reused:
                    msg = 'Reusing already computed {id} {uuid}'
                    reused_expr_val_set.add(expr_val)
                else:
                    msg = 'Computed {id} {uuid}'
                    computed_expr_val_set.add(expr_val)

                op = expr_val.expr.op
                if (op.callable_ not in hidden_callable_set and
                        not issubclass(op.value_type, engine.ForcedParamType)):
                    log_f = info
                else:
                    log_f = debug

                log_f(
                    msg.format(
                        id=expr_val.get_id(
                            full_qual=False,
                            with_tags=True,
                            hidden_callable_set=hidden_callable_set,
                        ),
                        uuid=get_uuid_str(expr_val),
                    ))

                # Drop into the debugger if we got an exception
                excep = expr_val.excep
                if use_pdb and excep is not NoValue:
                    error(utils.format_exception(excep))
                    pdb.post_mortem(excep.__traceback__)
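
Note: pdb.post_mortem() accepts the traceback of an already-caught exception, which is why the snippet above can pass excep.__traceback__ directly. A self-contained sketch of the same pattern, independent of the engine/utils helpers (the helper name run_with_post_mortem is hypothetical):

import pdb
import traceback

def run_with_post_mortem(func, *args, use_pdb=True, **kwargs):
    # Call func and, on failure, print the traceback and drop into the
    # debugger, mirroring the use_pdb handling in log_expr_val() above.
    try:
        return func(*args, **kwargs)
    except Exception as e:
        print(''.join(traceback.format_exception(type(e), e, e.__traceback__)))
        if use_pdb:
            pdb.post_mortem(e.__traceback__)
        raise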
Example #2
 def excep_handler(module, e):
     nonlocal exit_after_import
     error('Could not import "{}":\n{}'.format(
         module,
         utils.format_exception(e),
     ))
     exit_after_import = True
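
utils.format_exception() is project-specific; these snippets only rely on it returning an exception's traceback as a string. A minimal stand-in with that behavior (an assumption about the helper, not its actual implementation) could look like:

import traceback

def format_exception(e):
    # Hypothetical stand-in: format an exception object, including its
    # traceback, as a single string.
    return ''.join(traceback.format_exception(type(e), e, e.__traceback__))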
Example #3
        def wrapper(self, *args, **kwargs):
            try:
                f(self, *args, **kwargs)
            except TestResult as e:
                result = e
            except AssertionError as e:
                result = TestResult(
                    TestResultStatus.FAILED, 'failed assertion: {}\n{}'.format(
                        e, indent(utils.format_exception(e))), [])
            else:
                result = TestResult(TestResultStatus.PASSED)

            if result.expr_list is None:
                result.expr_list = self.expr_list

            return result
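
The wrapper above assumes TestResult can both be raised (to short-circuit a test) and returned as a value. A hypothetical, self-contained sketch of types satisfying those constraints (the real definitions live in the project):

import enum

class TestResultStatus(enum.Enum):
    PASSED = 'passed'
    FAILED = 'failed'

class TestResult(Exception):
    # Deriving from Exception is what makes "except TestResult as e" in the
    # wrapper legal; expr_list defaults to None so the wrapper can fill it in.
    def __init__(self, status, msg='', expr_list=None):
        super().__init__(msg)
        self.status = status
        self.msg = msg
        self.expr_list = expr_list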
Example #4
def main(argv=sys.argv[1:]):
    return_code = 0

    try:
        return_code = _main(argv)
    # Quietly exit for these exceptions
    except SILENT_EXCEPTIONS:
        pass
    except SystemExit as e:
        return_code = e.code
    # Catch-all
    except Exception as e:
        formatted_excep = 'Exception traceback:\n' + utils.format_exception(e)
        if show_traceback:
            error(formatted_excep)
        else:
            # Still log the traceback in the debug log
            debug(formatted_excep)

        # Always show the concise message
        error(e)
        return_code = GENERIC_ERROR_CODE

    sys.exit(return_code)
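
For the "except SILENT_EXCEPTIONS:" clause to be valid at runtime, SILENT_EXCEPTIONS must be an exception class or a tuple of them. The members below are only a plausible guess, as is the value of GENERIC_ERROR_CODE; the real constants are defined elsewhere in the project:

# Exceptions for which main() exits quietly (hypothetical members).
SILENT_EXCEPTIONS = (KeyboardInterrupt, BrokenPipeError)
# Generic failure exit code (assumed value).
GENERIC_ERROR_CODE = 1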
Example #5
def exec_expr_list(iteration_expr_list, adaptor, artifact_dir,
                   testsession_uuid, hidden_callable_set,
                   only_template_scripts, adaptor_cls, verbose, save_db,
                   use_pdb):

    if not only_template_scripts:
        with (artifact_dir / 'UUID').open('wt') as f:
            f.write(testsession_uuid + '\n')

        (artifact_dir / 'BY_UUID').mkdir()

    out('\nArtifacts dir: {}\n'.format(artifact_dir))

    for expr in utils.flatten_seq(iteration_expr_list):
        expr_short_id = expr.get_id(
            hidden_callable_set=hidden_callable_set,
            with_tags=False,
            full_qual=False,
            qual=False,
        )

        data = expr.data
        data['id'] = expr_short_id
        data['uuid'] = expr.uuid

        expr_artifact_dir = pathlib.Path(artifact_dir, expr_short_id,
                                         expr.uuid)
        expr_artifact_dir.mkdir(parents=True)
        expr_artifact_dir = expr_artifact_dir.resolve()
        data['artifact_dir'] = artifact_dir
        data['expr_artifact_dir'] = expr_artifact_dir

        with (expr_artifact_dir / 'UUID').open('wt') as f:
            f.write(expr.uuid + '\n')

        with (expr_artifact_dir / 'ID').open('wt') as f:
            f.write(expr_short_id + '\n')

        with (expr_artifact_dir / 'STRUCTURE').open('wt') as f:
            f.write(
                expr.get_id(
                    hidden_callable_set=hidden_callable_set,
                    with_tags=False,
                    full_qual=True,
                ) + '\n\n')
            f.write(expr.format_structure() + '\n')

        is_svg, dot_output = utils.render_graphviz(expr)
        graphviz_path = expr_artifact_dir / 'STRUCTURE.{}'.format(
            'svg' if is_svg else 'dot')
        with graphviz_path.open('wt', encoding='utf-8') as f:
            f.write(dot_output)

        with (expr_artifact_dir / 'EXPRESSION_TEMPLATE.py').open(
                'wt', encoding='utf-8') as f:
            f.write(
                expr.get_script(
                    prefix='expr',
                    db_path=os.path.join('..', utils.DB_FILENAME),
                    db_relative_to='__file__',
                )[1] + '\n', )

    if only_template_scripts:
        return 0

    # Preserve the execution order, so the summary is displayed in the same
    # order
    result_map = collections.OrderedDict()
    for i, expr_list in enumerate(iteration_expr_list, start=1):
        info('Iteration #{}\n'.format(i))

        for expr in expr_list:
            exec_start_msg = 'Executing: {short_id}\n\nID: {full_id}\nArtifacts: {folder}\nUUID: {uuid_}'.format(
                short_id=expr.get_id(
                    hidden_callable_set=hidden_callable_set,
                    full_qual=False,
                    qual=False,
                ),
                full_id=expr.get_id(
                    hidden_callable_set=hidden_callable_set
                    if not verbose else None,
                    full_qual=True,
                ),
                folder=expr.data['expr_artifact_dir'],
                uuid_=expr.uuid).replace('\n', '\n# ')

            delim = '#' * (len(exec_start_msg.splitlines()[0]) + 2)
            out(delim + '\n# ' + exec_start_msg + '\n' + delim)

            result_list = list()
            result_map[expr] = result_list

            def pre_line():
                out('-' * 40)

            # Flush all the output of the expression so that no buffered
            # stderr output is displayed after the "official" end of the
            # Expression's execution.

            def flush_std_streams():
                sys.stdout.flush()
                sys.stderr.flush()

            def get_uuid_str(expr_val):
                return 'UUID={}'.format(expr_val.uuid)

            computed_expr_val_set = set()
            reused_expr_val_set = set()

            def log_expr_val(expr_val, reused):
                # Consider that a PrebuiltOperator reuses values instead of
                # actually computing them.
                if isinstance(expr_val.expr.op, engine.PrebuiltOperator):
                    reused = True

                if reused:
                    msg = 'Reusing already computed {id} {uuid}'
                    reused_expr_val_set.add(expr_val)
                else:
                    msg = 'Computed {id} {uuid}'
                    computed_expr_val_set.add(expr_val)

                op = expr_val.expr.op
                if (op.callable_ not in hidden_callable_set and
                        not issubclass(op.value_type, engine.ForcedParamType)):
                    log_f = info
                else:
                    log_f = debug

                log_f(
                    msg.format(
                        id=expr_val.get_id(
                            full_qual=False,
                            with_tags=True,
                            hidden_callable_set=hidden_callable_set,
                        ),
                        uuid=get_uuid_str(expr_val),
                    ))

                # Drop into the debugger if we got an exception
                excep = expr_val.excep
                if use_pdb and excep is not NoValue:
                    error(utils.format_exception(excep))
                    pdb.post_mortem(excep.__traceback__)

            def get_duration_str(expr_val):
                if expr_val.duration is None:
                    duration = ''
                else:
                    duration = '{:.2f}s'.format(expr_val.duration)

                cumulative = expr_val.cumulative_duration
                cumulative = ' (cumulative: {:.2f}s)'.format(
                    cumulative) if cumulative else ''

                return '{}{}'.format(duration, cumulative)

            # This returns an iterator
            executor = expr.execute(log_expr_val)

            out('')
            for result in utils.iterate_cb(executor, pre_line,
                                           flush_std_streams):
                for excep_val in result.get_excep():
                    excep = excep_val.excep
                    tb = utils.format_exception(excep)
                    error(
                        '{e_name}: {e}\nID: {id}\n{tb}'.format(
                            id=excep_val.get_id(),
                            e_name=utils.get_name(type(excep)),
                            e=excep,
                            tb=tb,
                        ), )

                prefix = 'Finished {uuid} in {duration} '.format(
                    uuid=get_uuid_str(result),
                    duration=get_duration_str(result),
                )
                out('{prefix}{id}'.format(
                    id=result.get_id(
                        full_qual=False,
                        qual=False,
                        mark_excep=True,
                        with_tags=True,
                        hidden_callable_set=hidden_callable_set,
                    ).strip().replace('\n', '\n' + len(prefix) * ' '),
                    prefix=prefix,
                ))

                out(adaptor.format_result(result))
                result_list.append(result)

            out('')
            expr_artifact_dir = expr.data['expr_artifact_dir']

            # Finalize the computation
            adaptor.finalize_expr(expr)

            # Dump the reproducer script
            with (expr_artifact_dir / 'EXPRESSION.py').open(
                    'wt', encoding='utf-8') as f:
                f.write(
                    expr.get_script(
                        prefix='expr',
                        db_path=os.path.join('..', '..', utils.DB_FILENAME),
                        db_relative_to='__file__',
                    )[1] + '\n', )

            def format_uuid(expr_val_list):
                uuid_list = sorted(
                    {expr_val.uuid
                     for expr_val in expr_val_list})
                return '\n'.join(uuid_list)

            def write_uuid(path, *args):
                with path.open('wt') as f:
                    f.write(format_uuid(*args) + '\n')

            write_uuid(expr_artifact_dir / 'VALUES_UUID', result_list)
            write_uuid(expr_artifact_dir / 'REUSED_VALUES_UUID',
                       reused_expr_val_set)
            write_uuid(expr_artifact_dir / 'COMPUTED_VALUES_UUID',
                       computed_expr_val_set)

            # From there, use a relative path for symlinks
            expr_artifact_dir = pathlib.Path(
                '..', expr_artifact_dir.relative_to(artifact_dir))
            computed_uuid_set = {
                expr_val.uuid
                for expr_val in computed_expr_val_set
            }
            computed_uuid_set.add(expr.uuid)
            for uuid_ in computed_uuid_set:
                (artifact_dir / 'BY_UUID' /
                 uuid_).symlink_to(expr_artifact_dir)

    if save_db:
        db = engine.ValueDB(
            engine.FrozenExprValSeq.from_expr_list(
                utils.flatten_seq(iteration_expr_list),
                hidden_callable_set=hidden_callable_set,
            ),
            adaptor_cls=adaptor_cls,
        )

        db_path = artifact_dir / utils.DB_FILENAME
        db.to_path(db_path)
        relative_db_path = db_path.relative_to(artifact_dir)
    else:
        relative_db_path = None
        db = None

    out('#' * 80)
    info('Artifacts dir: {}'.format(artifact_dir))
    info('Result summary:')

    # Display the results summary
    summary = adaptor.get_summary(result_map)
    out(summary)
    with (artifact_dir / 'SUMMARY').open('wt', encoding='utf-8') as f:
        f.write(summary + '\n')

    # Output the merged script with all subscripts
    script_path = artifact_dir / 'ALL_SCRIPTS.py'
    result_name_map, all_scripts = engine.Expression.get_all_script(
        utils.flatten_seq(iteration_expr_list),
        prefix='expr',
        db_path=relative_db_path,
        db_relative_to='__file__',
        db=db,
        adaptor_cls=adaptor_cls,
    )

    with script_path.open('wt', encoding='utf-8') as f:
        f.write(all_scripts + '\n')

    return adaptor.get_run_exit_code(result_map)
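
exec_expr_list() drives the expression generator through utils.iterate_cb(), calling pre_line() and flush_std_streams() around each computed result. A stand-in with the behavior the loop relies on (an assumption; the real helper's exact hook ordering may differ):

def iterate_cb(iterator, pre_hook=None, post_hook=None):
    # Yield every value of "iterator", invoking pre_hook before asking for
    # the next value and post_hook once it has been produced.
    while True:
        if pre_hook:
            pre_hook()
        try:
            value = next(iterator)
        except StopIteration:
            return
        if post_hook:
            post_hook()
        yield value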
Example #6
 def format_error(expr_val):
     return '{}:\n{}'.format(
         expr_val.get_id(qual=False),
         indent(utils.format_exception(expr_val.excep))
     )
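
indent() is not defined in this snippet; assuming it simply prefixes every line of the formatted traceback so it nests under the expression ID, a thin wrapper over textwrap.indent would be enough:

import textwrap

def indent(text, prefix='    '):
    # Assumed helper: indent each line of "text" by four spaces.
    return textwrap.indent(text, prefix)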
Example #7
def do_run(args, parser, run_parser, argv):
    # Import all modules, before selecting the adaptor
    def best_effort(mod, e):
        return

    import_error_code = 1

    module_set = set()
    for path in args.python_files:
        try:
            imported = utils.import_modules([path], excep_handler=best_effort)
        # This might fail because some adaptor options may introduce "fake"
        # positional arguments, as these options are not registered yet.
        except Exception:
            pass
        else:
            module_set.update(imported)

    # Look for a customization submodule in one of the parent packages of the
    # modules we specified on the command line.
    try:
        utils.find_customization_module_set(module_set)
    except Exception as e:
        error('Could not import the customization module:\n{}'.format(
            utils.format_exception(e),
        ))
        return import_error_code

    adaptor_name = args.adaptor
    adaptor_cls = AdaptorBase.get_adaptor_cls(adaptor_name)
    if not adaptor_cls:
        if adaptor_name:
            raise RuntimeError('Adaptor "{}" cannot be found'.format(adaptor_name))
        else:
            raise RuntimeError('No adaptor was found')
    # Add all the CLI arguments of the adaptor before reparsing the
    # command line.
    # adaptor_group = utils.create_adaptor_parser_group(run_parser, adaptor_cls)
    adaptor_group = run_parser
    adaptor_cls.register_run_param(adaptor_group)

    # Reparse the command line after the adaptor has had a chance to add its
    # own arguments.
    args = parser.parse_args(argv)

    # Re-import now that we are sure to have the correct list of sources
    exit_after_import = False
    def excep_handler(module, e):
        nonlocal exit_after_import
        error('Could not import "{}":\n{}'.format(
            module,
            utils.format_exception(e),
        ))
        exit_after_import = True

    module_set = utils.import_modules(args.python_files, excep_handler=excep_handler)

    if exit_after_import:
        return import_error_code

    # Make sure the module in which adaptor_cls is defined is used
    module_set.add(inspect.getmodule(adaptor_cls))

    verbose = args.verbose
    use_pdb = args.pdb or args.replay
    save_db = args.save_value_db

    iteration_nr = args.n
    shared_pattern_set = set(args.share)
    random_order = args.random_order

    adaptor = adaptor_cls(args)

    only_list = args.list
    only_template_scripts = args.template_scripts

    rst_expr_list = args.rst_list
    if rst_expr_list:
        only_list = True

    type_goal_pattern_set = set(args.goal)
    callable_goal_pattern_set = set(args.callable_goal)

    if not (type_goal_pattern_set or callable_goal_pattern_set):
        type_goal_pattern_set = set(adaptor_cls.get_default_type_goal_pattern_set())

    load_db_path_list = args.load_db
    load_db_pattern_list = args.load_type
    load_db_uuid_list = args.load_uuid
    load_db_replay_uuid = args.replay
    load_db_uuid_args = load_db_replay_uuid or args.load_uuid_args

    user_filter_set = set(args.select)
    user_filter_set.update(args.select_multiple)

    if load_db_replay_uuid and user_filter_set:
        run_parser.error('--replay and --select cannot be used at the same time')

    if load_db_replay_uuid and not load_db_path_list:
        run_parser.error('--load-db must be specified to use --replay')

    restricted_pattern_set = set(args.restrict)
    forbidden_pattern_set = set(args.forbid)
    allowed_pattern_set = set(args.allow)
    allowed_pattern_set.update(restricted_pattern_set)
    allowed_pattern_set.update(callable_goal_pattern_set)
    artifact_dir_link = args.symlink_artifact_dir_to

    # Set up the artifact_dir so we can create a verbose log in it
    date = datetime.datetime.now().strftime('%Y%m%d_%H:%M:%S')
    testsession_uuid = utils.create_uuid()
    if only_template_scripts:
        artifact_dir = pathlib.Path(only_template_scripts)
    elif args.artifact_dir:
        artifact_dir = pathlib.Path(args.artifact_dir)
    # If we are not given a specific folder, we create one under the root we
    # were given
    else:
        artifact_dir = pathlib.Path(args.artifact_root, date + '_' + testsession_uuid)

    if only_list:
        debug_log = None
        info_log = None
    else:
        artifact_dir.mkdir(parents=True)
        if artifact_dir_link:
            if artifact_dir_link.exists() and not artifact_dir_link.is_symlink():
                raise ValueError('This is not a symlink and will not be overwritten: {}'.format(
                    artifact_dir_link))
            with contextlib.suppress(FileNotFoundError):
                artifact_dir_link.unlink()
            artifact_dir_link.symlink_to(artifact_dir, target_is_directory=True)

        artifact_dir = artifact_dir.resolve()
        # Update the CLI arguments so the customization module has access to the
        # correct value
        args.artifact_dir = artifact_dir
        debug_log = artifact_dir / 'DEBUG.log'
        info_log = artifact_dir / 'INFO.log'

    utils.setup_logging(args.log_level, debug_log, info_log, verbose=verbose)

    # Get the set of all callables in the given set of modules
    callable_pool = utils.get_callable_set(module_set, verbose=verbose)

    # Build the pool of operators from the callables
    non_reusable_type_set = set(utils.flatten_seq(
        utils.get_subclasses(cls)
        for cls in adaptor.get_non_reusable_type_set()
    ))

    op_set = build_op_set(
        callable_pool, non_reusable_type_set, allowed_pattern_set, adaptor,
    )

    # If we load some PrebuiltOperators from the DB, we want to keep them in
    # order so that replayed expressions are replayed in the same order, which
    # makes it much easier to correlate logs. From now on, use an OrderedSet().
    op_set = OrderedSet(op_set)

    # Load objects from an existing database
    if load_db_path_list:
        db_list = []
        for db_path in load_db_path_list:
            db = engine.ValueDB.from_path(db_path)
            op_set.update(
                load_from_db(db, adaptor, non_reusable_type_set,
                    load_db_pattern_list, load_db_uuid_list, load_db_uuid_args
                )
            )
            db_list.append(db)
    # Get the prebuilt operators from the adaptor
    else:
        db_list = []
        op_set.update(adaptor.get_prebuilt_op_set())

    # Force some parameter values to be provided with a specific callable
    patch_map = build_patch_map(args.sweep, args.param, op_set)
    op_set.update(apply_patch_map(patch_map, adaptor))

    # Some operators are hidden in IDs since they don't add useful information
    # (internal classes)
    hidden_callable_set = {
        op.callable_
        for op in adaptor.get_hidden_op_set(op_set)
    }

    # These get_id() options are used for all user-exposed listings that are
    # supposed to be filterable with user_filter_set (like only_list)
    filterable_id_kwargs = dict(
        full_qual=False,
        qual=False,
        with_tags=False,
        hidden_callable_set=hidden_callable_set
    )

    # Restrict the Expressions that will be executed to just the one we
    # care about
    if db_list and load_db_replay_uuid:
        id_kwargs = copy.copy(filterable_id_kwargs)
        del id_kwargs['hidden_callable_set']
        # Let the merge logic handle duplicated UUIDs
        db = engine.ValueDB.merge(db_list)
        user_filter_set = {
            db.get_by_uuid(load_db_replay_uuid).get_id(**id_kwargs)
        }

    # Only print once per tuple of parameters
    if verbose:
        @utils.once
        def handle_non_produced(cls_name, consumer_name, param_name, callable_path):
            info('Nothing can produce instances of {cls} needed for {consumer} (parameter "{param}", along path {path})'.format(
                cls=cls_name,
                consumer=consumer_name,
                param=param_name,
                path=' -> '.join(utils.get_name(callable_) for callable_ in callable_path)
            ))

        @utils.once
        def handle_cycle(path):
            error('Cyclic dependency detected: {path}'.format(
                path=' -> '.join(
                    utils.get_name(callable_)
                    for callable_ in path
                )
            ))
    else:
        handle_non_produced = 'ignore'
        handle_cycle = 'ignore'

    # Get the callable goals, either by the callable name or the value type
    root_op_set = OrderedSet([
        op for op in op_set
        if (
            utils.match_name(op.get_name(full_qual=True), callable_goal_pattern_set)
            or
            # All producers of the goal types can be a root operator in the
            # expressions we are going to build, i.e. the outermost function call
            utils.match_base_cls(op.value_type, type_goal_pattern_set)
            # Only keep the Expressions whose outermost (root) operator is
            # defined in one of the files that were explicitly specified on the
            # command line.
        ) and inspect.getmodule(op.callable_) in module_set
    ])

    # Build the class context from the set of Operators that we collected
    class_ctx = engine.ClassContext.from_op_set(
        op_set=op_set,
        forbidden_pattern_set=forbidden_pattern_set,
        restricted_pattern_set=restricted_pattern_set
    )

    # Build the list of Expressions that can be constructed from the set of
    # callables
    expr_list = class_ctx.build_expr_list(
        root_op_set,
        non_produced_handler=handle_non_produced,
        cycle_handler=handle_cycle,
    )

    # First, sort with the fully qualified ID so we have the strongest stability
    # possible from one run to another
    expr_list.sort(key=lambda expr: expr.get_id(full_qual=True, with_tags=True))
    # Then sort again according to what will be displayed. Since it is a stable
    # sort, it will keep a stable order for IDs that look the same but actually
    # differ in their hidden part
    expr_list.sort(key=lambda expr: expr.get_id(qual=False, with_tags=True))

    if random_order:
        random.shuffle(expr_list)

    if user_filter_set:
        expr_list = [
            expr for expr in expr_list
            if utils.match_name(expr.get_id(**filterable_id_kwargs), user_filter_set)
        ]

    if not expr_list:
        info('Nothing to do, check --help while passing some python sources to get the full help.')
        return 1

    id_kwargs = {
        **filterable_id_kwargs,
        'full_qual': bool(verbose),
    }

    if rst_expr_list:
        id_kwargs['style'] = 'rst'
        for expr in expr_list:
            out('* {}'.format(expr.get_id(**id_kwargs)))
    else:
        out('The following expressions will be executed:\n')
        for expr in expr_list:
            out(expr.get_id(**id_kwargs))

            if verbose >= 2:
                out(expr.format_structure() + '\n')

        formatted_out = adaptor.format_expr_list(expr_list, verbose=verbose)
        if formatted_out:
            out('\n' + formatted_out + '\n')

    if only_list:
        return 0

    # Get a list of ComputableExpression in order to execute them
    expr_list = engine.ComputableExpression.from_expr_list(expr_list)

    if iteration_nr > 1:
        shared_op_set = {
            # We don't allow matching on root operators, since that would be
            # pointless. Sharing root operators basically means doing the work
            # once, and then reusing everything at every iteration.
            op for op in (op_set - root_op_set)
            if utils.match_base_cls(op.value_type, shared_pattern_set)
        }
        def predicate(expr): return expr.op not in shared_op_set

        iteration_expr_list = [
            # Apply CSE within each iteration
            engine.ComputableExpression.cse(
                expr.clone_by_predicate(predicate)
                for expr in expr_list
            )
            for i in range(iteration_nr)
        ]
    else:
        iteration_expr_list = [expr_list]

    # Make sure all references to Consumer are cloned appropriately
    for expr in utils.flatten_seq(iteration_expr_list):
        expr.prepare_execute()

    exec_ret_code = exec_expr_list(
        iteration_expr_list=iteration_expr_list,
        adaptor=adaptor,
        artifact_dir=artifact_dir,
        testsession_uuid=testsession_uuid,
        hidden_callable_set=hidden_callable_set,
        only_template_scripts=only_template_scripts,
        adaptor_cls=adaptor_cls,
        verbose=verbose,
        save_db=save_db,
        use_pdb=use_pdb,
    )

    # If we reloaded a DB, merge it with the current DB so the outcome is a
    # self-contained artifact dir
    if load_db_path_list and save_db:
        orig_list = [
            path if path.is_dir() else path.parent
            for path in map(pathlib.Path, load_db_path_list)
        ]
        do_merge(orig_list, artifact_dir, output_exist=True)

    return exec_ret_code
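
do_run() relies on utils.once to make handle_non_produced() and handle_cycle() print each warning only once per tuple of parameters. A hypothetical stand-in for that decorator, based only on the behavior the comment describes:

import functools

def once(f):
    # Run the wrapped callback only once per distinct tuple of (hashable)
    # positional arguments; later calls with the same arguments are no-ops.
    seen = set()

    @functools.wraps(f)
    def wrapper(*args):
        if args not in seen:
            seen.add(args)
            return f(*args)

    return wrapper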