# Code example #1
def build_op_set(callable_pool, non_reusable_type_set, allowed_pattern_set,
                 adaptor):
    """Build the set of :class:`engine.Operator` from a pool of callables.

    Every callable in ``callable_pool`` is wrapped in an ``engine.Operator``,
    the adaptor gets a chance to filter the resulting set, and any operator
    whose fully-qualified name matches ``allowed_pattern_set`` is added back
    regardless of the adaptor's filtering.
    """
    def make_op(callable_):
        return engine.Operator(callable_,
                               non_reusable_type_set=non_reusable_type_set,
                               tags_getter=adaptor.get_tags)

    op_set = set(map(make_op, callable_pool))

    selected_op_set = adaptor.filter_op_set(op_set)
    # Re-add the operators that were explicitly allowed by the user, even if
    # the adaptor filtered them out.
    for op in op_set:
        if utils.match_name(op.get_name(full_qual=True), allowed_pattern_set):
            selected_op_set.add(op)
    return selected_op_set
# Code example #2
def build_patch_map(sweep_spec_list, param_spec_list, op_set):
    """Map each matching operator to the parameter values to patch in.

    ``param_spec_list`` entries ``(pattern, param, value)`` are treated as
    degenerate sweeps (start == stop, step 1), then every sweep spec is
    applied to each operator of ``op_set`` whose fully-qualified callable
    name matches the spec's pattern.

    :returns: ``{op: {param: [value, ...]}}``
    """
    spec_list = copy.copy(sweep_spec_list)
    # Turn each plain param spec into an equivalent single-value sweep spec.
    for callable_pattern, param, value in param_spec_list:
        spec_list.append((callable_pattern, param, value, value, 1))

    patch_map = {}
    for callable_pattern, param, start, stop, step in spec_list:
        for op in op_set:
            name = utils.get_name(op.callable_, full_qual=True)
            if utils.match_name(name, [callable_pattern]):
                param_map = patch_map.setdefault(op, {})
                param_map.setdefault(param, []).extend(
                    utils.sweep_param(op.callable_, param, start, stop, step))
    return patch_map
# Code example #3
def do_run(args, parser, run_parser, argv):
    """Entry point of the ``run`` subcommand.

    Imports the user-specified Python sources, selects an adaptor, builds the
    set of operators and the list of expressions to execute, then runs them
    and post-processes the artifacts.

    :param args: Namespace from the first command-line parsing pass.
    :param parser: Top-level argument parser, used to re-parse ``argv`` once
        the adaptor has registered its own options.
    :param run_parser: Parser of the ``run`` subcommand, used to register
        adaptor options and to report usage errors.
    :param argv: Raw command-line argument list, re-parsed after the adaptor
        options are known.
    :returns: ``0`` when only listing expressions, ``1`` when no expression
        matched, otherwise the return code of ``exec_expr_list()``.
    """
    # Import all modules, before selecting the adaptor
    module_set = set()
    for path in args.python_files:
        # This might fail, since some adaptor options may introduce "fake"
        # positional arguments, since these options are not registered yet.
        with contextlib.suppress(ValueError, ImportError):
            module_set.update(utils.import_modules([path], best_effort=True))

    # Look for a customization submodule in one of the parent packages of the
    # modules we specified on the command line.
    utils.find_customization_module_set(module_set)

    adaptor_name = args.adaptor
    adaptor_cls = AdaptorBase.get_adaptor_cls(adaptor_name)
    if not adaptor_cls:
        if adaptor_name:
            raise RuntimeError(
                'Adaptor "{}" cannot be found'.format(adaptor_name))
        else:
            raise RuntimeError('No adaptor was found')
    # Add all the CLI arguments of the adaptor before reparsing the
    # command line.
    # adaptor_group = utils.create_adaptor_parser_group(run_parser, adaptor_cls)
    adaptor_group = run_parser
    adaptor_cls.register_run_param(adaptor_group)

    # Reparse the command line after the adaptor had a chance to add its own
    # arguments.
    args = parser.parse_args(argv)

    # Re-import now that we are sure to have the correct list of sources
    module_set = utils.import_modules(args.python_files)

    # Make sure the module in which adaptor_cls is defined is used
    module_set.add(inspect.getmodule(adaptor_cls))

    # Cache the frequently-used CLI options as locals.
    verbose = args.verbose
    use_pdb = args.pdb or args.replay
    save_db = args.save_value_db

    iteration_nr = args.n
    shared_pattern_set = set(args.share)
    random_order = args.random_order

    adaptor = adaptor_cls(args)

    only_list = args.list
    only_template_scripts = args.template_scripts

    # --rst-list implies listing-only mode.
    rst_expr_list = args.rst_list
    if rst_expr_list:
        only_list = True

    type_goal_pattern_set = set(args.goal)
    callable_goal_pattern_set = set(args.callable_goal)

    # Fall back on the adaptor's default goals when the user gave none.
    if not (type_goal_pattern_set or callable_goal_pattern_set):
        type_goal_pattern_set = set(
            adaptor_cls.get_default_type_goal_pattern_set())

    load_db_path_list = args.load_db
    load_db_pattern_list = args.load_type
    load_db_uuid_list = args.load_uuid
    load_db_replay_uuid = args.replay
    # --replay implies loading the arguments of the replayed UUID as well.
    load_db_uuid_args = load_db_replay_uuid or args.load_uuid_args

    user_filter_set = set(args.select)
    user_filter_set.update(args.select_multiple)

    if load_db_replay_uuid and user_filter_set:
        run_parser.error(
            '--replay and --select cannot be used at the same time')

    if load_db_replay_uuid and not load_db_path_list:
        run_parser.error('--load-db must be specified to use --replay')

    # Restricted and goal callables are implicitly allowed as well.
    restricted_pattern_set = set(args.restrict)
    forbidden_pattern_set = set(args.forbid)
    allowed_pattern_set = set(args.allow)
    allowed_pattern_set.update(restricted_pattern_set)
    allowed_pattern_set.update(callable_goal_pattern_set)
    artifact_dir_link = args.symlink_artifact_dir_to

    # Setup the artifact_dir so we can create a verbose log in there
    # NOTE(review): ':' in the folder name is not portable to Windows
    # filesystems — confirm that is acceptable.
    date = datetime.datetime.now().strftime('%Y%m%d_%H:%M:%S')
    testsession_uuid = utils.create_uuid()
    if only_template_scripts:
        artifact_dir = pathlib.Path(only_template_scripts)
    elif args.artifact_dir:
        artifact_dir = pathlib.Path(args.artifact_dir)
    # If we are not given a specific folder, we create one under the root we
    # were given
    else:
        artifact_dir = pathlib.Path(args.artifact_root,
                                    date + '_' + testsession_uuid)

    if only_list:
        # Listing mode does not create any artifact, so no log files either.
        debug_log = None
        info_log = None
    else:
        artifact_dir.mkdir(parents=True)
        if artifact_dir_link:
            # Refuse to clobber anything that is not a symlink we created.
            if artifact_dir_link.exists(
            ) and not artifact_dir_link.is_symlink():
                raise ValueError(
                    'This is not a symlink and will not be overwritten: {}'.
                    format(artifact_dir_link))
            with contextlib.suppress(FileNotFoundError):
                artifact_dir_link.unlink()
            artifact_dir_link.symlink_to(artifact_dir,
                                         target_is_directory=True)

        artifact_dir = artifact_dir.resolve()
        # Update the CLI arguments so the customization module has access to the
        # correct value
        args.artifact_dir = artifact_dir
        debug_log = artifact_dir / 'DEBUG.log'
        info_log = artifact_dir / 'INFO.log'

    utils.setup_logging(args.log_level, debug_log, info_log, verbose=verbose)

    # Get the set of all callables in the given set of modules
    callable_pool = utils.get_callable_set(module_set, verbose=verbose)

    # Build the pool of operators from the callables.
    # Non-reusable types include all their subclasses.
    non_reusable_type_set = set(
        utils.flatten_seq(
            utils.get_subclasses(cls)
            for cls in adaptor.get_non_reusable_type_set()))

    op_set = build_op_set(
        callable_pool,
        non_reusable_type_set,
        allowed_pattern_set,
        adaptor,
    )

    # If we load some PrebuiltOperator from the DB, we want to keep them in
    # order so that replayed expressions will be replayed in the same order,
    # making it much easier to correlate logs, so from now on, use an
    # OrderedSet()
    op_set = OrderedSet(op_set)

    # Load objects from an existing database
    if load_db_path_list:
        db_list = []
        for db_path in load_db_path_list:
            db = engine.ValueDB.from_path(db_path)
            op_set.update(
                load_from_db(db, adaptor, non_reusable_type_set,
                             load_db_pattern_list, load_db_uuid_list,
                             load_db_uuid_args))
            db_list.append(db)
    # Get the prebuilt operators from the adaptor
    else:
        db_list = []
        op_set.update(adaptor.get_prebuilt_op_set())

    # Force some parameter values to be provided with a specific callable
    patch_map = build_patch_map(args.sweep, args.param, op_set)
    op_set.update(apply_patch_map(patch_map, adaptor))

    # Some operators are hidden in IDs since they don't add useful information
    # (internal classes)
    hidden_callable_set = {
        op.callable_
        for op in adaptor.get_hidden_op_set(op_set)
    }

    # These get_id() options are used for all user-exposed listing that is supposed to be
    # filterable with user_filter_set (like only_list)
    filterable_id_kwargs = dict(full_qual=False,
                                qual=False,
                                with_tags=False,
                                hidden_callable_set=hidden_callable_set)

    # Restrict the Expressions that will be executed to just the one we
    # care about
    if db_list and load_db_replay_uuid:
        id_kwargs = copy.copy(filterable_id_kwargs)
        del id_kwargs['hidden_callable_set']
        # Let the merge logic handle duplicated UUIDs
        db = engine.ValueDB.merge(db_list)
        # The replayed expression's ID becomes the sole user filter.
        user_filter_set = {
            db.get_by_uuid(load_db_replay_uuid).get_id(**id_kwargs)
        }

    # Only print once per parameters' tuple
    if verbose:

        @utils.once
        def handle_non_produced(cls_name, consumer_name, param_name,
                                callable_path):
            info(
                'Nothing can produce instances of {cls} needed for {consumer} (parameter "{param}", along path {path})'
                .format(cls=cls_name,
                        consumer=consumer_name,
                        param=param_name,
                        path=' -> '.join(
                            utils.get_name(callable_)
                            for callable_ in callable_path)))

        @utils.once
        def handle_cycle(path):
            error('Cyclic dependency detected: {path}'.format(path=' -> '.join(
                utils.get_name(callable_) for callable_ in path)))
    else:
        # Non-verbose runs silently ignore non-produced values and cycles.
        handle_non_produced = 'ignore'
        handle_cycle = 'ignore'

    # Get the callable goals, either by the callable name or the value type
    root_op_set = OrderedSet([
        op for op in op_set if
        (utils.match_name(op.get_name(
            full_qual=True), callable_goal_pattern_set) or
         # All producers of the goal types can be a root operator in the
         # expressions we are going to build, i.e. the outermost function call
         utils.match_base_cls(op.value_type, type_goal_pattern_set)
         # Only keep the Expression where the outermost (root) operator is
         # defined in one of the files that were explicitly specified on the
         # command line.
         ) and inspect.getmodule(op.callable_) in module_set
    ])

    # Build the class context from the set of Operator's that we collected
    class_ctx = engine.ClassContext.from_op_set(
        op_set=op_set,
        forbidden_pattern_set=forbidden_pattern_set,
        restricted_pattern_set=restricted_pattern_set)

    # Build the list of Expression that can be constructed from the set of
    # callables
    expr_list = class_ctx.build_expr_list(
        root_op_set,
        non_produced_handler=handle_non_produced,
        cycle_handler=handle_cycle,
    )

    # First, sort with the fully qualified ID so we have the strongest stability
    # possible from one run to another
    expr_list.sort(
        key=lambda expr: expr.get_id(full_qual=True, with_tags=True))
    # Then sort again according to what will be displayed. Since it is a stable
    # sort, it will keep a stable order for IDs that look the same but actually
    # differ in their hidden part
    expr_list.sort(key=lambda expr: expr.get_id(qual=False, with_tags=True))

    if random_order:
        random.shuffle(expr_list)

    # Apply the user-provided --select/--replay filters on the displayed IDs.
    if user_filter_set:
        expr_list = [
            expr for expr in expr_list
            if utils.match_name(expr.get_id(
                **filterable_id_kwargs), user_filter_set)
        ]

    if not expr_list:
        info(
            'Nothing to do, check --help while passing some python sources to get the full help.'
        )
        return 1

    # Verbose mode shows fully-qualified IDs.
    id_kwargs = {
        **filterable_id_kwargs,
        'full_qual': bool(verbose),
    }

    if rst_expr_list:
        id_kwargs['style'] = 'rst'
        for expr in expr_list:
            out('* {}'.format(expr.get_id(**id_kwargs)))
    else:
        out('The following expressions will be executed:\n')
        for expr in expr_list:
            out(expr.get_id(**id_kwargs))

            if verbose >= 2:
                out(expr.format_structure() + '\n')

        formatted_out = adaptor.format_expr_list(expr_list, verbose=verbose)
        if formatted_out:
            out('\n' + formatted_out + '\n')

    if only_list:
        return 0

    # Get a list of ComputableExpression in order to execute them
    expr_list = engine.ComputableExpression.from_expr_list(expr_list)

    if iteration_nr > 1:
        shared_op_set = {
            # We don't allow matching on root operators, since that would be
            # pointless. Sharing root operators basically means doing the work
            # once, and then reusing everything at every iteration.
            op
            for op in (op_set - root_op_set)
            if utils.match_base_cls(op.value_type, shared_pattern_set)
        }

        # Only clone the subexpressions that are not shared across iterations.
        def predicate(expr):
            return expr.op not in shared_op_set

        iteration_expr_list = [
            # Apply CSE within each iteration
            engine.ComputableExpression.cse(
                expr.clone_by_predicate(predicate) for expr in expr_list)
            for i in range(iteration_nr)
        ]
    else:
        iteration_expr_list = [expr_list]

    # Make sure all references to Consumer are cloned appropriately
    for expr in utils.flatten_seq(iteration_expr_list):
        expr.prepare_execute()

    exec_ret_code = exec_expr_list(
        iteration_expr_list=iteration_expr_list,
        adaptor=adaptor,
        artifact_dir=artifact_dir,
        testsession_uuid=testsession_uuid,
        hidden_callable_set=hidden_callable_set,
        only_template_scripts=only_template_scripts,
        adaptor_cls=adaptor_cls,
        verbose=verbose,
        save_db=save_db,
        use_pdb=use_pdb,
    )

    # If we reloaded a DB, merge it with the current DB so the outcome is a
    # self-contained artifact dir
    if load_db_path_list and save_db:
        orig_list = [
            path if path.is_dir() else path.parent
            for path in map(pathlib.Path, load_db_path_list)
        ]
        do_merge(orig_list, artifact_dir, output_exist=True)

    return exec_ret_code