Example #1
0
def find_offloadable_trees(iet):
    """
    Return the trees within ``iet`` that can be computed by YASK.

    A tree is "offloadable to YASK" if it is embedded in a time stepping loop
    *and* all of the grids accessed by the enclosed equations are homogeneous
    (i.e., same dimensions and data type).

    Returns a namedtuple ``(trees, grid, dtype)``; all fields default to
    ``([], None, None)`` when no offloadable tree is found.
    """
    # NOTE: typename previously misspelt as 'Offlodable'
    Offloadable = namedtuple('Offloadable', 'trees grid dtype')
    Offloadable.__new__.__defaults__ = [], None, None

    reducer = ReducerMap()

    # Find offloadable candidates
    for tree in retrieve_iteration_tree(iet):
        # The outermost iteration must be over time and it must
        # nest at least one iteration
        if len(tree) <= 1:
            continue
        if not tree.root.dim.is_Time:
            continue
        grid_tree = tree[1:]
        if not all(i.is_Affine for i in grid_tree):
            # Non-affine array accesses unsupported by YASK
            continue
        bundles = [i for i in tree.inner.nodes if i.is_ExpressionBundle]
        if len(bundles) != 1:
            # Illegal nest
            continue
        bundle = bundles[0]
        # Found an offloadable candidate
        reducer.setdefault('grid_trees', []).append(grid_tree)
        # Track `grid` and `dtype`; they must turn out to be unique
        functions = flatten(i.functions for i in bundle.exprs)
        reducer.extend(
            ('grid', i.grid) for i in functions if i.is_TimeFunction)
        reducer.extend(
            ('dtype', i.dtype) for i in functions if i.is_TimeFunction)

    # `grid` and `dtype` must be unique
    try:
        grid = reducer.unique('grid')
        dtype = reducer.unique('dtype')
        trees = reducer['grid_trees']
    except (KeyError, ValueError):
        return Offloadable()

    # Do the trees iterate over a convex section of the grid?
    # BUGFIX: this used `|` (union), which is truthy for any non-empty
    # operand, making the check vacuous; `&` (intersection) expresses the
    # intended test, i.e. each loop dimension must belong to the grid
    grid_dims = set(grid.dimensions)
    if not all(i.dim._defines & grid_dims for i in flatten(trees)):
        return Offloadable()

    return Offloadable(trees, grid, dtype)
Example #2
0
File: operator.py — Project: ponykid/SNIST
    def _prepare_arguments(self, **kwargs):
        """
        Process the runtime arguments passed to ``.apply()`` and derive
        default values for any remaining arguments.
        """
        # Data carriers: user-provided overrides first, then fill in
        # defaults for whatever is still missing
        runtime = ReducerMap()
        runtime.update([p._arg_values(**kwargs)
                        for p in self.input if p.name in kwargs])
        runtime.update([p._arg_values()
                        for p in self.input if p.name not in runtime])
        runtime = runtime.reduce_all()

        # All TensorFunctions should be defined on one and the same Grid
        carriers = [kwargs.get(p, p)
                    for p in self.input if p.is_TensorFunction]
        grids = ReducerMap([('grid', f.grid) for f in carriers if f.grid])
        try:
            grid = grids.unique('grid')
        except (KeyError, ValueError):
            if grids and configuration['mpi']:
                raise RuntimeError("Multiple `Grid`s found before `apply`")
            grid = None

        # Dimensions: the derived ones go last, since they may need/affect
        # their parents
        derived, main = split(self.dimensions, lambda i: i.is_Derived)
        for d in main:
            runtime.update(d._arg_values(runtime, self._dspace[d], grid,
                                         **kwargs))
        for d in derived:
            runtime.update(d._arg_values(runtime, self._dspace[d], grid,
                                         **kwargs))

        # Sanity check
        for p in self.input:
            p._arg_check(runtime, self._dspace[p])

        # Profiler argument
        runtime[self._profiler.name] = self._profiler.timer.reset()

        # Backend-specific arguments, if any
        runtime.update(kwargs.pop('backend', {}))

        # Autotuning may adjust the arguments
        runtime = self._autotune(
            runtime, kwargs.pop('autotune', configuration['autotuning']))

        # Any leftover user-provided keyword must be known to the Operator
        if not configuration['ignore-unknowns']:
            for k, v in kwargs.items():
                if k in self._known_arguments:
                    continue
                raise ValueError("Unrecognized argument %s=%s" % (k, v))

        return runtime
Example #3
0
    def _prepare_arguments(self, **kwargs):
        """
        Process the runtime arguments passed to ``.apply()`` and derive
        default values for any remaining arguments.
        """
        # Data carriers: user-provided overrides first, then fill in
        # defaults for whatever is still missing
        runtime = ReducerMap()
        runtime.update([p._arg_values(**kwargs)
                        for p in self.input if p.name in kwargs])
        runtime.update([p._arg_values()
                        for p in self.input if p.name not in runtime])
        runtime = runtime.reduce_all()

        # All TensorFunctions should be defined on one and the same Grid
        carriers = [kwargs.get(p, p)
                    for p in self.input if p.is_TensorFunction]
        grids = ReducerMap([('grid', f.grid) for f in carriers if f.grid])
        try:
            grid = grids.unique('grid')
        except (KeyError, ValueError):
            if grids and configuration['mpi']:
                raise RuntimeError("Multiple `Grid`s found before `apply`")
            grid = None

        # Dimensions: the derived ones go last, since they may need/affect
        # their parents
        derived, main = split(self.dimensions, lambda i: i.is_Derived)
        for d in main:
            runtime.update(d._arg_values(runtime, self._dspace[d], grid,
                                         **kwargs))
        for d in derived:
            runtime.update(d._arg_values(runtime, self._dspace[d], grid,
                                         **kwargs))

        # Sanity check
        for p in self.input:
            p._arg_check(runtime, self._dspace[p])

        # Derive additional values for DLE arguments
        # TODO: This is not pretty, but it works for now. Ideally, the
        # DLE arguments would be massaged into the IET so as to comply
        # with the rest of the argument derivation procedure.
        for dle_arg in self._dle_args:
            dim = dle_arg.argument
            osize = (1 + dle_arg.original_dim.symbolic_end -
                     dle_arg.original_dim.symbolic_start).subs(runtime)
            value = dle_arg.value
            if value is None:
                size = osize
            elif isinstance(value, int):
                size = value
            else:
                size = value(osize)
            runtime[dim.symbolic_size.name] = size

        # Profiler argument
        runtime[self.profiler.name] = self.profiler.timer.reset()

        # Backend-specific arguments, if any
        runtime.update(kwargs.pop('backend', {}))

        # Autotuning may adjust the arguments
        if kwargs.pop('autotune', configuration['autotuning'].level):
            runtime = self._autotune(runtime)

        # Any leftover user-provided keyword must be known to the Operator
        for key, val in kwargs.items():
            if key not in self._known_arguments:
                raise ValueError(
                    "Unrecognized argument %s=%s passed to `apply`" % (key, val))

        return runtime
Example #4
0
    def _prepare_arguments(self, **kwargs):
        """
        Process runtime arguments passed to ``.apply()`` and derive
        default values for any remaining arguments.

        Returns the fully-reduced argument map produced by
        ``ReducerMap.reduce_all``, ready for consumption by the generated
        code.
        """
        overrides, defaults = split(self.input, lambda p: p.name in kwargs)
        # Process data-carrier overrides
        args = ReducerMap()
        for p in overrides:
            args.update(p._arg_values(**kwargs))
            try:
                # Reduce after every override so that an incompatibility is
                # detected (and attributed) as early as possible
                args = ReducerMap(args.reduce_all())
            except ValueError:
                raise ValueError(
                    "Override `%s` is incompatible with overrides `%s`" %
                    (p, [i for i in overrides if i.name in args]))
        # Process data-carrier defaults
        for p in defaults:
            if p.name in args:
                # E.g., SubFunctions
                continue
            for k, v in p._arg_values(**kwargs).items():
                # A default value must not contradict what the overrides
                # (processed above) have already established
                if k in args and args[k] != v:
                    raise ValueError(
                        "Default `%s` is incompatible with other args as "
                        "`%s=%s`, while `%s=%s` is expected. Perhaps you "
                        "forgot to override `%s`?" % (p, k, v, k, args[k], p))
                args[k] = v
        args = args.reduce_all()

        # All DiscreteFunctions should be defined on the same Grid
        functions = [
            kwargs.get(p, p) for p in self.input if p.is_DiscreteFunction
        ]
        mapper = ReducerMap([('grid', i.grid) for i in functions if i.grid])
        try:
            grid = mapper.unique('grid')
        except (KeyError, ValueError):
            # Multiple (or zero) grids: fatal under MPI, tolerated otherwise
            if mapper and configuration['mpi']:
                raise RuntimeError("Multiple `Grid`s found before `apply`")
            grid = None

        # Process dimensions (derived go after as they might need/affect their parents)
        derived, main = split(self.dimensions, lambda i: i.is_Derived)
        for p in main:
            args.update(p._arg_values(args, self._dspace[p], grid, **kwargs))
        for p in derived:
            args.update(p._arg_values(args, self._dspace[p], grid, **kwargs))

        # Sanity check
        for p in self.input:
            p._arg_check(args, self._dspace[p])

        # Turn arguments into a format suitable for the generated code
        # E.g., instead of NumPy arrays for Functions, the generated code expects
        # pointers to ctypes.Struct
        for p in self.input:
            try:
                # Prefer the user-provided override, falling back to `p` itself
                args.update(kwargs.get(p.name, p)._arg_as_ctype(args, alias=p))
            except AttributeError:
                # User-provided floats/ndarray obviously do not have `_arg_as_ctype`
                args.update(p._arg_as_ctype(args, alias=p))

        # Add in the profiler argument
        args[self._profiler.name] = self._profiler.timer.reset()

        # Add in any backend-specific argument
        args.update(kwargs.pop('backend', {}))

        # Execute autotuning and adjust arguments accordingly
        args = self._autotune(
            args, kwargs.pop('autotune', configuration['autotuning']))

        # Check all user-provided keywords are known to the Operator
        if not configuration['ignore-unknowns']:
            for k, v in kwargs.items():
                if k not in self._known_arguments:
                    raise ValueError("Unrecognized argument %s=%s" % (k, v))

        return args