def prepare_arguments(self, **kwargs): """ Process runtime arguments passed to ``.apply()` and derive default values for any remaining arguments. """ # Process data-carriers (first overrides, then fill up with whatever is needed) args = ReducerMap() args.update([p._arg_values(**kwargs) for p in self.input if p.name in kwargs]) args.update([p._arg_values() for p in self.input if p.name not in args]) args = args.reduce_all() # Process dimensions (derived go after as they might need/affect their parents) derived, main = split(self.dimensions, lambda i: i.is_Derived) for p in main: args.update(p._arg_values(args, self._dspace[p], **kwargs)) for p in derived: args.update(p._arg_values(args, self._dspace[p], **kwargs)) # Sanity check for p in self.input: p._arg_check(args, self._dspace[p]) # Derive additional values for DLE arguments # TODO: This is not pretty, but it works for now. Ideally, the # DLE arguments would be massaged into the IET so as to comply # with the rest of the argument derivation procedure. for arg in self._dle_args: dim = arg.argument osize = (1 + arg.original_dim.symbolic_end - arg.original_dim.symbolic_start).subs(args) if dim.symbolic_size in self.parameters: if arg.value is None: args[dim.symbolic_size.name] = osize elif isinstance(arg.value, int): args[dim.symbolic_size.name] = arg.value else: args[dim.symbolic_size.name] = arg.value(osize) # Add in the profiler argument args[self.profiler.name] = self.profiler.new() # Add in any backend-specific argument args.update(kwargs.pop('backend', {})) # Execute autotuning and adjust arguments accordingly if kwargs.pop('autotune', False): args = self._autotune(args) # Check all user-provided keywords are known to the Operator for k, v in kwargs.items(): if k not in self.known_arguments: raise ValueError("Unrecognized argument %s=%s passed to `apply`" % (k, v)) return args
def _prepare_arguments(self, **kwargs): """ Process runtime arguments passed to ``.apply()` and derive default values for any remaining arguments. """ # Process data-carriers (first overrides, then fill up with whatever is needed) args = ReducerMap() args.update( [p._arg_values(**kwargs) for p in self.input if p.name in kwargs]) args.update( [p._arg_values() for p in self.input if p.name not in args]) args = args.reduce_all() # All TensorFunctions should be defined on the same Grid functions = [ kwargs.get(p, p) for p in self.input if p.is_TensorFunction ] mapper = ReducerMap([('grid', i.grid) for i in functions if i.grid]) try: grid = mapper.unique('grid') except (KeyError, ValueError): if mapper and configuration['mpi']: raise RuntimeError("Multiple `Grid`s found before `apply`") grid = None # Process dimensions (derived go after as they might need/affect their parents) derived, main = split(self.dimensions, lambda i: i.is_Derived) for p in main: args.update(p._arg_values(args, self._dspace[p], grid, **kwargs)) for p in derived: args.update(p._arg_values(args, self._dspace[p], grid, **kwargs)) # Sanity check for p in self.input: p._arg_check(args, self._dspace[p]) # Add in the profiler argument args[self._profiler.name] = self._profiler.timer.reset() # Add in any backend-specific argument args.update(kwargs.pop('backend', {})) # Execute autotuning and adjust arguments accordingly args = self._autotune( args, kwargs.pop('autotune', configuration['autotuning'])) # Check all user-provided keywords are known to the Operator if not configuration['ignore-unknowns']: for k, v in kwargs.items(): if k not in self._known_arguments: raise ValueError("Unrecognized argument %s=%s" % (k, v)) return args
def _arg_defaults(self):
    """Return a ReducerMap holding this Grid's default argument values."""
    defaults = ReducerMap()
    # One set of defaults per Dimension: minimum index 0, rank-local size
    for dim, sizes in self.dimension_map.items():
        defaults.update(dim._arg_defaults(_min=0, size=sizes.loc))
    # For distributed runs, the generated code also needs the MPI
    # communicator and the neighborhood description
    dist = self.distributor
    if dist.is_parallel:
        defaults[dist._obj_comm.name] = dist._obj_comm.value
        defaults[dist._obj_neighborhood.name] = dist._obj_neighborhood.value
    return defaults
def _arg_defaults(self):
    """Build and return the map of default argument values for this Grid."""
    defaults = ReducerMap()
    # Per-Dimension defaults: zero minimum and the rank-local extent
    for dim, glb_loc in self.dimension_map.items():
        defaults.update(dim._arg_defaults(_min=0, size=glb_loc.loc))
    # With MPI enabled, also pass the communicator and the neighborhood
    # objects down to the generated code
    if configuration['mpi']:
        dist = self.distributor
        defaults[dist._obj_comm.name] = dist._obj_comm.value
        defaults[dist._obj_neighborhood.name] = dist._obj_neighborhood.value
    return defaults
def _arg_defaults(self):
    """
    Build the map of default argument values defined by this Grid.
    """
    defaults = ReducerMap()
    # Per-Dimension defaults: iteration start at 0, size equal to the
    # rank-local extent
    for dim, extents in self.dimension_map.items():
        defaults.update(dim._arg_defaults(start=0, size=extents.loc))
    # MPI-related C-level objects (communicator and neighbours struct)
    if configuration['mpi']:
        dist = self.distributor
        defaults[dist._C_comm.name] = dist._C_comm.value
        neighbours = dist._C_neighbours.obj
        defaults[neighbours.name] = neighbours.value
    return defaults
def _arg_defaults(self, alias=None):
    """
    A map of default argument values defined by this symbol.

    Parameters
    ----------
    alias : optional
        To bind the argument values to different names.
    """
    key = alias or self
    # Map `self` and its SubFunctions to their alias counterparts, so that
    # the produced argument names follow `key` rather than `self`
    mapper = {self: key}
    mapper.update({getattr(self, i): getattr(key, i) for i in self._sub_functions})
    args = ReducerMap()
    # Add in the sparse data (as well as any SubFunction data) belonging to
    # self's local domain only
    for k, v in self._dist_scatter().items():
        args[mapper[k].name] = v
        for i, s, o in zip(mapper[k].indices, v.shape, k.staggered):
            # NOTE(review): the size includes the staggering offset `o` —
            # presumably to account for staggered layouts; confirm against
            # the Dimension's `_arg_defaults`
            args.update(i._arg_defaults(_min=0, size=s+o))
    # Add MPI-related data structures
    args.update(self.grid._arg_defaults())
    return args
def _arg_defaults(self, alias=None):
    """
    A map of default argument values defined by this symbol.

    Parameters
    ----------
    alias : DiscreteFunction, optional
        To bind the argument values to different names.
    """
    target = alias or self
    # The data buffer is always self's, even when binding to alias names
    defaults = ReducerMap({target.name: self._data_buffer})
    # Collect default dimension arguments from all indices; the size
    # accounts for the staggering offset
    for dim, extent, stagger in zip(target.indices, self.shape, self.staggered):
        defaults.update(dim._arg_defaults(_min=0, size=extent + stagger))
    # Add MPI-related data structures, if a Grid is attached
    grid = self.grid
    if grid is not None:
        defaults.update(grid._arg_defaults())
    return defaults
def _arg_defaults(self):
    """Return a ReducerMap carrying this Grid's default argument values."""
    defaults = ReducerMap()
    # Dimension sizes: zero minimum, rank-local extent
    for dim, sizes in self.dimension_map.items():
        defaults.update(dim._arg_defaults(_min=0, size=sizes.loc))
    # Grid spacing values, keyed by spacing symbol name
    defaults.update({sym.name: val for sym, val in self.spacing_map.items()})
    # Grid origin values, keyed by origin symbol name
    defaults.update({sym.name: val for sym, val in self.origin_map.items()})
    # MPI machinery, only needed for distributed runs
    dist = self.distributor
    if dist.is_parallel:
        defaults[dist._obj_comm.name] = dist._obj_comm.value
        defaults[dist._obj_neighborhood.name] = dist._obj_neighborhood.value
    return defaults
def _prepare_arguments(self, **kwargs):
    """
    Process runtime arguments passed to ``.apply()`` and derive
    default values for any remaining arguments.

    Parameters
    ----------
    **kwargs
        User-provided argument overrides, keyed by parameter name. Special
        keys consumed here: ``backend`` (dict of backend-specific arguments)
        and ``autotune``.

    Returns
    -------
    The finalized argument map (as produced by ``ReducerMap.reduce_all``,
    then mutated in place).
    """
    # Partition the input data-carriers into user-overridden and defaulted
    overrides, defaults = split(self.input, lambda p: p.name in kwargs)

    # Process data-carrier overrides
    args = ReducerMap()
    for p in overrides:
        args.update(p._arg_values(**kwargs))
        try:
            # Collapse eagerly so that an incompatibility is reported
            # against the override `p` that introduced it
            args = ReducerMap(args.reduce_all())
        except ValueError:
            raise ValueError(
                "Override `%s` is incompatible with overrides `%s`" %
                (p, [i for i in overrides if i.name in args]))

    # Process data-carrier defaults
    for p in defaults:
        if p.name in args:
            # E.g., SubFunctions
            continue
        for k, v in p._arg_values(**kwargs).items():
            if k in args and args[k] != v:
                raise ValueError(
                    "Default `%s` is incompatible with other args as "
                    "`%s=%s`, while `%s=%s` is expected. Perhaps you "
                    "forgot to override `%s`?" %
                    (p, k, v, k, args[k], p))
            args[k] = v
    args = args.reduce_all()

    # All DiscreteFunctions should be defined on the same Grid
    grids = {getattr(p, 'grid', None) for p in self.input} - {None}
    # Multiple Grids are only fatal when running with MPI
    if len(grids) > 1 and configuration['mpi']:
        raise ValueError("Multiple Grids found")
    try:
        grid = grids.pop()
    except KeyError:
        grid = None

    # Process Dimensions (derived go after as they might need/affect their parents)
    derived, main = split(self.dimensions, lambda i: i.is_Derived)
    for d in main:
        args.update(d._arg_values(args, self._dspace[d], grid, **kwargs))
    for d in derived:
        args.update(d._arg_values(args, self._dspace[d], grid, **kwargs))

    # Process Objects (which may need some `args`)
    for o in self.objects:
        args.update(o._arg_values(args, **kwargs))

    # Sanity check: each parameter validates the derived values against
    # its data space
    for p in self.parameters:
        p._arg_check(args, self._dspace[p])

    # Turn arguments into a format suitable for the generated code
    # E.g., instead of NumPy arrays for Functions, the generated code expects
    # pointers to ctypes.Struct
    for p in self.parameters:
        try:
            args.update(kwargs.get(p.name, p)._arg_as_ctype(args, alias=p))
        except AttributeError:
            # User-provided floats/ndarray obviously do not have `_arg_as_ctype`
            args.update(p._arg_as_ctype(args, alias=p))

    # Add in the profiler argument
    args[self._profiler.name] = self._profiler.timer.reset()

    # Add in any backend-specific argument
    args.update(kwargs.pop('backend', {}))

    # Execute autotuning and adjust arguments accordingly
    args = self._autotune(args, kwargs.pop('autotune', configuration['autotuning']))

    # Check all user-provided keywords are known to the Operator
    if not configuration['ignore-unknowns']:
        for k, v in kwargs.items():
            if k not in self._known_arguments:
                raise ValueError("Unrecognized argument %s=%s" % (k, v))

    return args
def _prepare_arguments(self, **kwargs):
    """
    Process runtime arguments passed to ``.apply()`` and derive
    default values for any remaining arguments.

    Parameters
    ----------
    **kwargs
        User-provided argument overrides, keyed by parameter name. Special
        keys consumed here: ``backend`` (dict of backend-specific arguments)
        and ``autotune``.

    Returns
    -------
    The finalized argument map (as produced by ``ReducerMap.reduce_all``,
    then mutated in place).
    """
    # Process data-carriers (first overrides, then fill up with whatever is needed)
    args = ReducerMap()
    args.update([p._arg_values(**kwargs) for p in self.input if p.name in kwargs])
    args.update([p._arg_values() for p in self.input if p.name not in args])
    args = args.reduce_all()

    # All TensorFunctions should be defined on the same Grid
    # NOTE(review): `kwargs` is keyed by parameter *names* elsewhere in this
    # function, so `kwargs.get(p, p)` with `p` a parameter object likely always
    # falls back to `p`; possibly `kwargs.get(p.name, p)` was intended — confirm
    functions = [kwargs.get(p, p) for p in self.input if p.is_TensorFunction]
    mapper = ReducerMap([('grid', i.grid) for i in functions if i.grid])
    try:
        # `unique` yields the single collected Grid, or raises on ambiguity
        grid = mapper.unique('grid')
    except (KeyError, ValueError):
        # Multiple (or zero) Grids: only fatal when running with MPI
        if mapper and configuration['mpi']:
            raise RuntimeError("Multiple `Grid`s found before `apply`")
        grid = None

    # Process dimensions (derived go after as they might need/affect their parents)
    derived, main = split(self.dimensions, lambda i: i.is_Derived)
    for p in main:
        args.update(p._arg_values(args, self._dspace[p], grid, **kwargs))
    for p in derived:
        args.update(p._arg_values(args, self._dspace[p], grid, **kwargs))

    # Sanity check: each data-carrier validates the derived values against
    # its data space
    for p in self.input:
        p._arg_check(args, self._dspace[p])

    # Derive additional values for DLE arguments
    # TODO: This is not pretty, but it works for now. Ideally, the
    # DLE arguments would be massaged into the IET so as to comply
    # with the rest of the argument derivation procedure.
    for arg in self._dle_args:
        dim = arg.argument
        # Original dimension extent, with any symbolic bounds substituted
        # from the already-derived `args`
        osize = (1 + arg.original_dim.symbolic_end -
                 arg.original_dim.symbolic_start).subs(args)
        if arg.value is None:
            # No user value: default to the original dimension extent
            args[dim.symbolic_size.name] = osize
        elif isinstance(arg.value, int):
            args[dim.symbolic_size.name] = arg.value
        else:
            # `arg.value` is a callable deriving the size from `osize`
            args[dim.symbolic_size.name] = arg.value(osize)

    # Add in the profiler argument
    args[self.profiler.name] = self.profiler.timer.reset()

    # Add in any backend-specific argument
    args.update(kwargs.pop('backend', {}))

    # Execute autotuning and adjust arguments accordingly
    if kwargs.pop('autotune', configuration['autotuning'].level):
        args = self._autotune(args)

    # Check all user-provided keywords are known to the Operator
    for k, v in kwargs.items():
        if k not in self._known_arguments:
            raise ValueError(
                "Unrecognized argument %s=%s passed to `apply`" % (k, v))

    return args
def _prepare_arguments(self, **kwargs):
    """
    Process runtime arguments passed to ``.apply()`` and derive
    default values for any remaining arguments.

    Parameters
    ----------
    **kwargs
        User-provided argument overrides, keyed by parameter name. The
        special key ``autotune`` is consumed here.

    Returns
    -------
    ArgumentsMap
        The fully-derived arguments, with the (possibly None) Grid attached.
    """
    # Partition the input data-carriers into user-overridden and defaulted
    overrides, defaults = split(self.input, lambda p: p.name in kwargs)

    # Process data-carrier overrides
    args = ReducerMap()
    for p in overrides:
        args.update(p._arg_values(**kwargs))
        try:
            # Collapse eagerly so that an incompatibility is reported
            # against the override `p` that introduced it
            args = ReducerMap(args.reduce_all())
        except ValueError:
            raise ValueError(
                "Override `%s` is incompatible with overrides `%s`" %
                (p, [i for i in overrides if i.name in args]))

    # Process data-carrier defaults
    for p in defaults:
        if p.name in args:
            # E.g., SubFunctions
            continue
        for k, v in p._arg_values(**kwargs).items():
            if k in args and args[k] != v:
                raise ValueError(
                    "Default `%s` is incompatible with other args as "
                    "`%s=%s`, while `%s=%s` is expected. Perhaps you "
                    "forgot to override `%s`?" % (p, k, v, k, args[k], p))
            args[k] = v
    args = args.reduce_all()

    # All DiscreteFunctions should be defined on the same Grid
    grids = {getattr(kwargs[p.name], 'grid', None) for p in overrides}
    grids.update({getattr(p, 'grid', None) for p in defaults})
    grids.discard(None)
    # Multiple Grids are only fatal when running with MPI
    if len(grids) > 1 and configuration['mpi']:
        raise ValueError("Multiple Grids found")
    try:
        grid = grids.pop()
        args.update(grid._arg_values(**kwargs))
    except KeyError:
        grid = None

    # Process Dimensions
    # A topological sorting is used so that derived Dimensions are processed after
    # their parents (note that a leaf Dimension can have an arbitrary long list of
    # ancestors)
    dag = DAG(self.dimensions,
              [(i, i.parent) for i in self.dimensions if i.is_Derived])
    for d in reversed(dag.topological_sort()):
        args.update(d._arg_values(args, self._dspace[d], grid, **kwargs))

    # Process Objects (which may need some `args`)
    for o in self.objects:
        args.update(o._arg_values(args, grid=grid, **kwargs))

    # Sanity check
    for p in self.parameters:
        p._arg_check(args, self._dspace[p])
    for d in self.dimensions:
        if d.is_Derived:
            # Bug fix: the original passed `self._dspace[p]`, where `p` was the
            # stale loop variable left over from the parameter loop above; each
            # derived Dimension must be checked against its own data space
            d._arg_check(args, self._dspace[d])

    # Turn arguments into a format suitable for the generated code
    # E.g., instead of NumPy arrays for Functions, the generated code expects
    # pointers to ctypes.Struct
    for p in self.parameters:
        try:
            args.update(kwargs.get(p.name, p)._arg_as_ctype(args, alias=p))
        except AttributeError:
            # User-provided floats/ndarray obviously do not have `_arg_as_ctype`
            args.update(p._arg_as_ctype(args, alias=p))

    # Execute autotuning and adjust arguments accordingly
    args = self._autotune(args, kwargs.pop('autotune', configuration['autotuning']))

    # Check all user-provided keywords are known to the Operator
    if not configuration['ignore-unknowns']:
        for k, v in kwargs.items():
            if k not in self._known_arguments:
                raise ValueError("Unrecognized argument %s=%s" % (k, v))

    # Attach `grid` to the arguments map
    args = ArgumentsMap(grid, **args)

    return args
def _prepare_arguments(self, **kwargs):
    """
    Process runtime arguments passed to ``.apply()`` and derive
    default values for any remaining arguments.

    Parameters
    ----------
    **kwargs
        User-provided argument overrides, keyed by parameter name. Special
        keys consumed here: ``backend`` (dict of backend-specific arguments)
        and ``autotune``.

    Returns
    -------
    The finalized argument map (as produced by ``ReducerMap.reduce_all``,
    then mutated in place).
    """
    # Partition the input data-carriers into user-overridden and defaulted
    overrides, defaults = split(self.input, lambda p: p.name in kwargs)

    # Process data-carrier overrides
    args = ReducerMap()
    for p in overrides:
        args.update(p._arg_values(**kwargs))
        try:
            # Collapse eagerly so that an incompatibility is reported
            # against the override `p` that introduced it
            args = ReducerMap(args.reduce_all())
        except ValueError:
            raise ValueError("Override `%s` is incompatible with overrides `%s`" %
                             (p, [i for i in overrides if i.name in args]))

    # Process data-carrier defaults
    for p in defaults:
        if p.name in args:
            # E.g., SubFunctions
            continue
        for k, v in p._arg_values(**kwargs).items():
            if k in args and args[k] != v:
                raise ValueError("Default `%s` is incompatible with other args as "
                                 "`%s=%s`, while `%s=%s` is expected. Perhaps you "
                                 "forgot to override `%s`?" % (p, k, v, k, args[k], p))
            args[k] = v
    args = args.reduce_all()

    # All DiscreteFunctions should be defined on the same Grid
    grids = {getattr(p, 'grid', None) for p in self.input} - {None}
    # Multiple Grids are only fatal when running with MPI
    if len(grids) > 1 and configuration['mpi']:
        raise ValueError("Multiple Grids found")
    try:
        grid = grids.pop()
    except KeyError:
        grid = None

    # Process Dimensions (derived go after as they might need/affect their parents)
    derived, main = split(self.dimensions, lambda i: i.is_Derived)
    for d in main:
        args.update(d._arg_values(args, self._dspace[d], grid, **kwargs))
    for d in derived:
        args.update(d._arg_values(args, self._dspace[d], grid, **kwargs))

    # Process Objects (which may need some `args`)
    for o in self.objects:
        args.update(o._arg_values(args, **kwargs))

    # Sanity check: each parameter validates the derived values against
    # its data space
    for p in self.parameters:
        p._arg_check(args, self._dspace[p])

    # Turn arguments into a format suitable for the generated code
    # E.g., instead of NumPy arrays for Functions, the generated code expects
    # pointers to ctypes.Struct
    for p in self.parameters:
        try:
            args.update(kwargs.get(p.name, p)._arg_as_ctype(args, alias=p))
        except AttributeError:
            # User-provided floats/ndarray obviously do not have `_arg_as_ctype`
            args.update(p._arg_as_ctype(args, alias=p))

    # Add in the profiler argument
    args[self._profiler.name] = self._profiler.timer.reset()

    # Add in any backend-specific argument
    args.update(kwargs.pop('backend', {}))

    # Execute autotuning and adjust arguments accordingly
    args = self._autotune(args, kwargs.pop('autotune', configuration['autotuning']))

    # Check all user-provided keywords are known to the Operator
    if not configuration['ignore-unknowns']:
        for k, v in kwargs.items():
            if k not in self._known_arguments:
                raise ValueError("Unrecognized argument %s=%s" % (k, v))

    return args