def _create_basis(self, function_space, mapping, basis_name_func,
                  first_dim_value_func):
    '''Internal utility to create an LFRic basis or differential basis
    function argument specific to the particular quadrature that is being
    used and add it to the symbol table and argument list. Also declare the
    associated "ndf" symbol and any quadrature-specific symbols if they have
    not already been declared so that they can be used to dimension the
    basis or differential basis symbol.

    This utility function is used to avoid code replication as the structure
    of a basis function is very similar to the structure of a differential
    basis function.

    :param function_space: the function space that this basis or \
        differential basis function is on.
    :type function_space: :py:class:`psyclone.domain.lfric.FunctionSpace`
    :param dict mapping: a mapping from quadrature type to basis \
        or differential basis class.
    :param method basis_name_func: a method that returns the name \
        of the basis or differential basis function for the \
        current function space.
    :param function first_dim_value_func: a function that returns \
        the size of the first dimension of the basis or \
        differential basis function for the current function \
        space.

    :raises NotImplementedError: if an evaluator shape is found \
        that is not a quadrature shape (currently just \
        'gh_evaluator').
    :raises InternalError: if the supplied evaluator shape is not \
        recognised.

    '''
    # pylint: disable=too-many-locals
    const = LFRicConstants()
    for shape in self._kern.eval_shapes:
        fs_name = function_space.orig_name
        ndf_symbol = self._symbol_table.symbol_from_tag(
            "ndf_{0}".format(fs_name),
            symbol_type=lfric_psyir.NumberOfDofsDataSymbol,
            fs=fs_name, interface=self._read_access)

        # Create the qr tag by appending the last part of the shape
        # name to "qr_".
        quad_name = shape.split("_")[-1]
        basis_tag = basis_name_func(qr_var="qr_" + quad_name)
        if shape == "gh_quadrature_xyoz":
            nqp_xy = self._symbol_table.symbol_from_tag(
                "nqp_xy",
                symbol_type=lfric_psyir.NumberOfQrPointsInXyDataSymbol,
                interface=self._read_access)
            nqp_z = self._symbol_table.symbol_from_tag(
                "nqp_z",
                symbol_type=lfric_psyir.NumberOfQrPointsInZDataSymbol,
                interface=self._read_access)
            arg = mapping["gh_quadrature_xyoz"](
                basis_tag, [int(first_dim_value_func(function_space)),
                            Reference(ndf_symbol), Reference(nqp_xy),
                            Reference(nqp_z)],
                fs_name, interface=self._read_access)
        elif shape == "gh_quadrature_face":
            nfaces = self._symbol_table.symbol_from_tag(
                "nfaces",
                symbol_type=lfric_psyir.NumberOfFacesDataSymbol,
                interface=self._read_access)
            nqp = self._symbol_table.symbol_from_tag(
                "nqp_faces",
                symbol_type=lfric_psyir.NumberOfQrPointsInFacesDataSymbol,
                interface=self._read_access)
            arg = mapping["gh_quadrature_face"](
                basis_tag, [int(first_dim_value_func(function_space)),
                            Reference(ndf_symbol), Reference(nqp),
                            Reference(nfaces)],
                fs_name, interface=self._read_access)
        elif shape == "gh_quadrature_edge":
            nedges = self._symbol_table.symbol_from_tag(
                "nedges",
                symbol_type=lfric_psyir.NumberOfEdgesDataSymbol,
                interface=self._read_access)
            nqp = self._symbol_table.symbol_from_tag(
                "nqp_edges",
                symbol_type=lfric_psyir.NumberOfQrPointsInEdgesDataSymbol,
                interface=self._read_access)
            arg = mapping["gh_quadrature_edge"](
                basis_tag, [int(first_dim_value_func(function_space)),
                            Reference(ndf_symbol), Reference(nqp),
                            Reference(nedges)],
                fs_name, interface=self._read_access)
        elif shape in const.VALID_EVALUATOR_SHAPES:
            # Need a (diff) basis array for each target space upon which
            # the basis functions have been evaluated. _kern.eval_targets
            # is a dict where the values are 2-tuples of
            # (FunctionSpace, argument).
            for _, _ in self._kern.eval_targets.items():
                raise NotImplementedError(
                    "Evaluator shapes not implemented in kernel_interface "
                    "class.")
        else:
            raise InternalError(
                "Unrecognised quadrature or evaluator shape '{0}'. "
                "Expected one of: {1}.".format(
                    shape, const.VALID_EVALUATOR_SHAPES))
        self._symbol_table.add(arg)
        self._arglist.append(arg)
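# --- Illustrative usage (not part of the original source) ---
# A minimal sketch of how a caller might drive _create_basis from the basis
# and differential-basis entry points. The mapping values are the symbol
# classes to construct; the class names and the helper names
# get_basis_name/basis_first_dim_value below are assumptions for
# illustration only and are not confirmed by this excerpt:
#
#     basis_mapping = {
#         "gh_quadrature_xyoz": lfric_psyir.BasisFunctionQrXyozDataSymbol,
#         "gh_quadrature_face": lfric_psyir.BasisFunctionQrFaceDataSymbol,
#         "gh_quadrature_edge": lfric_psyir.BasisFunctionQrEdgeDataSymbol}
#     self._create_basis(
#         function_space, basis_mapping,
#         basis_name_func=function_space.get_basis_name,
#         first_dim_value_func=self.basis_first_dim_value)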
def test_validate_kernel_code_arg(monkeypatch):
    '''Test that this method returns successfully if its two arguments
    have identical content, otherwise test that the expected exceptions
    are raised.

    '''
    kernel = DynKern()
    # Kernel name needs to be set when testing exceptions.
    kernel._name = "dummy"
    read_access = ArgumentInterface(ArgumentInterface.Access.READ)

    real_scalar_symbol = DataSymbol(
        "generic_real_scalar", REAL_TYPE, interface=read_access)
    int_scalar_symbol = DataSymbol(
        "generic_int_scalar", INTEGER_TYPE, interface=read_access)
    real_scalar_rw_symbol = DataSymbol(
        "generic_scalar_rw", REAL_TYPE,
        interface=ArgumentInterface(ArgumentInterface.Access.READWRITE))
    lfric_real_scalar_symbol = LfricRealScalarDataSymbol(
        "scalar", interface=read_access)
    lfric_int_scalar_symbol = LfricIntegerScalarDataSymbol(
        "scalar", interface=read_access)
    lfric_real_field_symbol = RealFieldDataDataSymbol(
        "field", dims=[1], fs="w0", interface=read_access)

    kernel._validate_kernel_code_arg(
        lfric_real_scalar_symbol, lfric_real_scalar_symbol)

    with pytest.raises(GenerationError) as info:
        kernel._validate_kernel_code_arg(
            lfric_real_scalar_symbol, lfric_int_scalar_symbol)
    assert (
        "Kernel argument 'scalar' has datatype 'Intrinsic.REAL' in kernel "
        "'dummy' but the LFRic API expects 'Intrinsic.INTEGER'"
        in str(info.value))

    with pytest.raises(GenerationError) as info:
        kernel._validate_kernel_code_arg(
            real_scalar_symbol, lfric_real_scalar_symbol)
    assert ("Kernel argument 'generic_real_scalar' has precision 'UNDEFINED' "
            "in kernel 'dummy' but the LFRic API expects 'r_def'."
            in str(info.value))

    with pytest.raises(GenerationError) as info:
        kernel._validate_kernel_code_arg(real_scalar_symbol,
                                         real_scalar_rw_symbol)
    assert ("Kernel argument 'generic_real_scalar' has intent 'READ' in "
            "kernel 'dummy' but the LFRic API expects intent "
            "'READWRITE'." in str(info.value))

    with pytest.raises(GenerationError) as info:
        kernel._validate_kernel_code_arg(lfric_real_field_symbol,
                                         lfric_real_scalar_symbol)
    assert ("Argument 'field' to kernel 'dummy' should be a scalar "
            "according to the LFRic API, but it is not." in str(info.value))

    with pytest.raises(GenerationError) as info:
        kernel._validate_kernel_code_arg(lfric_real_scalar_symbol,
                                         lfric_real_field_symbol)
    assert ("Argument 'scalar' to kernel 'dummy' should be an array "
            "according to the LFRic API, but it is not." in str(info.value))

    undf = NumberOfUniqueDofsDataSymbol("undf", fs="w0",
                                        interface=read_access)
    lfric_real_field_symbol2 = RealFieldDataDataSymbol(
        "field", dims=[Reference(undf)], fs="w0", interface=read_access)
    # If one of the dimensions is not a DataSymbol then the arguments
    # are not checked.
    kernel._validate_kernel_code_arg(lfric_real_field_symbol,
                                     lfric_real_field_symbol2)
    kernel._validate_kernel_code_arg(lfric_real_field_symbol2,
                                     lfric_real_field_symbol)

    lfric_real_field_symbol3 = RealFieldDataDataSymbol(
        "field", dims=[Reference(undf)], fs="w0", interface=read_access)
    monkeypatch.setattr(lfric_real_field_symbol3.datatype, "_shape",
                        [Reference(undf), Reference(undf)])
    with pytest.raises(GenerationError) as info:
        kernel._validate_kernel_code_arg(lfric_real_field_symbol2,
                                         lfric_real_field_symbol3)
    assert ("Argument 'field' to kernel 'dummy' should be an array with 2 "
            "dimension(s) according to the LFRic API, but found 1."
            in str(info.value))

    lfric_real_field_symbol4 = RealFieldDataDataSymbol(
        "field", dims=[Reference(int_scalar_symbol)], fs="w0",
        interface=read_access)
    with pytest.raises(GenerationError) as info:
        kernel._validate_kernel_code_arg(
            lfric_real_field_symbol4, lfric_real_field_symbol2)
    assert (
        "For dimension 1 in array argument 'field' to kernel 'dummy' the "
        "following error was found: Kernel argument 'generic_int_scalar' "
        "has precision 'UNDEFINED' in kernel 'dummy' but the LFRic API "
        "expects 'i_def'" in str(info.value))

    # Monkeypatch lfric_real_scalar_symbol to return that it is not a
    # scalar in order to force the required exception. We do this by
    # changing the ScalarType as it is used when determining whether
    # the symbol is a scalar.
    monkeypatch.setattr(psyclone.psyir.symbols, "ScalarType", str)
    with pytest.raises(InternalError) as info:
        kernel._validate_kernel_code_arg(
            lfric_real_scalar_symbol, lfric_real_scalar_symbol)
    assert (
        "unexpected argument type found for 'scalar' in kernel 'dummy'. "
        "Expecting a scalar or an array." in str(info.value))
def quad_rule(self, var_accesses=None):
    '''Create LFRic arguments associated with the required quadrature, if
    they do not already exist, and add them to the symbol table and
    argument list. The arguments depend on the type of quadrature
    requested.

    :param var_accesses: an unused optional argument that stores \
        information about variable accesses.
    :type var_accesses: \
        :py:class:`psyclone.core.access_info.VariablesAccessInfo`

    :raises InternalError: if an unsupported quadrature shape is found.

    '''
    # The kernel captures all the required quadrature shapes.
    for shape in self._kern.qr_rules:
        if shape == "gh_quadrature_xyoz":
            nqp_xy = self._symbol_table.symbol_from_tag(
                "nqp_xy",
                symbol_type=lfric_psyir.NumberOfQrPointsInXyDataSymbol,
                interface=self._read_access)
            nqp_z = self._symbol_table.symbol_from_tag(
                "nqp_z",
                symbol_type=lfric_psyir.NumberOfQrPointsInZDataSymbol,
                interface=self._read_access)
            weights_xy = self._symbol_table.symbol_from_tag(
                "weights_xy",
                symbol_type=lfric_psyir.QrWeightsInXyDataSymbol,
                dims=[Reference(nqp_xy)], interface=self._read_access)
            weights_z = self._symbol_table.symbol_from_tag(
                "weights_z",
                symbol_type=lfric_psyir.QrWeightsInZDataSymbol,
                dims=[Reference(nqp_z)], interface=self._read_access)
            self._arglist.extend([nqp_xy, nqp_z, weights_xy, weights_z])
        elif shape == "gh_quadrature_face":
            nfaces = self._symbol_table.symbol_from_tag(
                "nfaces",
                symbol_type=lfric_psyir.NumberOfFacesDataSymbol,
                interface=self._read_access)
            nqp = self._symbol_table.symbol_from_tag(
                "nqp_faces",
                symbol_type=lfric_psyir.NumberOfQrPointsInFacesDataSymbol,
                interface=self._read_access)
            weights = self._symbol_table.symbol_from_tag(
                "weights_faces",
                symbol_type=lfric_psyir.QrWeightsInFacesDataSymbol,
                dims=[Reference(nqp)], interface=self._read_access)
            self._arglist.extend([nfaces, nqp, weights])
        elif shape == "gh_quadrature_edge":
            nedges = self._symbol_table.symbol_from_tag(
                "nedges",
                symbol_type=lfric_psyir.NumberOfEdgesDataSymbol,
                interface=self._read_access)
            nqp = self._symbol_table.symbol_from_tag(
                "nqp_edges",
                symbol_type=lfric_psyir.NumberOfQrPointsInEdgesDataSymbol,
                interface=self._read_access)
            weights = self._symbol_table.symbol_from_tag(
                "weights_edges",
                symbol_type=lfric_psyir.QrWeightsInEdgesDataSymbol,
                dims=[Reference(nqp)], interface=self._read_access)
            self._arglist.extend([nedges, nqp, weights])
        else:
            raise InternalError(
                "Unsupported quadrature shape '{0}' found in "
                "kernel_interface.".format(shape))
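# --- Illustrative usage (not part of the original source) ---
# A minimal sketch, assuming a kernel-interface object ("kernel_interface"
# is an assumed name) built for a kernel that requests XYoZ quadrature:
# after quad_rule() the argument list should end with the four quadrature
# symbols created above, in the order they were appended:
#
#     kernel_interface.quad_rule()
#     tags = [kernel_interface._symbol_table.lookup_with_tag(tag)
#             for tag in ("nqp_xy", "nqp_z", "weights_xy", "weights_z")]
#     assert kernel_interface._arglist[-4:] == tags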
class GOLoop(Loop):
    ''' The GOcean specific Loop class. This passes the GOcean specific
    single loop information to the base class so it creates the one we
    require. Adds a GOcean specific setBounds method which tells the loop
    what to iterate over. Need to harmonise with the topology_name method
    in the Dynamo api.
    '''
    def __init__(self, parent=None, topology_name="", loop_type=""):
        Loop.__init__(self, parent=parent,
                      valid_loop_types=["inner", "outer"])
        self.loop_type = loop_type

        if self._loop_type == "inner":
            tag = "inner_loop_idx"
            suggested_name = "i"
        elif self.loop_type == "outer":
            tag = "outer_loop_idx"
            suggested_name = "j"

        symtab = self.scope.symbol_table
        try:
            self.variable = symtab.lookup_with_tag(tag)
        except KeyError:
            self.variable = symtab.new_symbol(suggested_name, tag,
                                              symbol_type=DataSymbol,
                                              datatype=INTEGER_TYPE)

        # Pre-initialise the Loop children
        # TODO: See issue #440
        self.addchild(Literal("NOT_INITIALISED", INTEGER_TYPE,
                              parent=self))  # start
        self.addchild(Literal("NOT_INITIALISED", INTEGER_TYPE,
                              parent=self))  # stop
        self.addchild(Literal("1", INTEGER_TYPE, parent=self))  # step
        self.addchild(Schedule(parent=self))  # loop body

    def gen_code(self, parent):
        if self.field_space == "every":
            from psyclone.f2pygen import DeclGen
            from psyclone.psyir.nodes import BinaryOperation
            dim_var = DeclGen(parent, datatype="INTEGER",
                              entity_decls=[self.variable.name])
            parent.add(dim_var)

            # Update start loop bound
            self.start_expr = Literal("1", INTEGER_TYPE, parent=self)

            # Update stop loop bound
            if self._loop_type == "inner":
                index = "1"
            elif self._loop_type == "outer":
                index = "2"
            self.stop_expr = BinaryOperation(BinaryOperation.Operator.SIZE,
                                             parent=self)
            self.stop_expr.addchild(
                Reference(DataSymbol(self.field_name, INTEGER_TYPE),
                          parent=self.stop_expr))
            self.stop_expr.addchild(
                Literal(index, INTEGER_TYPE, parent=self.stop_expr))

        else:
            # One of our spaces so use the loop-bound values provided by
            # the infrastructure.
            # TODO: Issue 440. Implement derived types in PSyIR.
            if self._loop_type == "inner":
                self.start_expr = Reference(self.field_space + "%istart",
                                            parent=self)
                self.stop_expr = Reference(self.field_space + "%istop",
                                           parent=self)
            elif self._loop_type == "outer":
                self.start_expr = Reference(self.field_space + "%jstart",
                                            parent=self)
                self.stop_expr = Reference(self.field_space + "%jstop",
                                           parent=self)

        Loop.gen_code(self, parent)
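# --- Illustrative usage (not part of the original source) ---
# A minimal construction sketch, assuming the surrounding GOcean invoke
# machinery is in place: an outer/inner loop nest is built by placing an
# inner GOLoop inside the body (fourth child) of an outer one:
#
#     outer = GOLoop(loop_type="outer")
#     inner = GOLoop(parent=outer.loop_body, loop_type="inner")
#     outer.loop_body.addchild(inner)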
        assert getattr(lfric_symbol, attribute) == attribute_map[attribute]
    lfric_symbol = symbol(*args, interface=ArgumentInterface(
        ArgumentInterface.Access.READ))
    assert isinstance(lfric_symbol.interface, ArgumentInterface)
    assert lfric_symbol.interface.access == ArgumentInterface.Access.READ


# Specific scalar datatypes
@pytest.mark.parametrize(
    "data_type, symbol, scalar_type, dims, attribute_map",
    [(lfric_psyir.RealFieldDataDataType,
      lfric_psyir.RealFieldDataDataSymbol,
      lfric_psyir.LfricRealScalarDataType,
      [Reference(
          lfric_psyir.NumberOfUniqueDofsDataSymbol(
              "ndofs", "w0",
              interface=ArgumentInterface(ArgumentInterface.Access.READ)))],
      {"fs": "w0"}),
     (lfric_psyir.IntegerFieldDataDataType,
      lfric_psyir.IntegerFieldDataDataSymbol,
      lfric_psyir.LfricIntegerScalarDataType,
      [Reference(
          lfric_psyir.NumberOfUniqueDofsDataSymbol(
              "ndofs", "w1",
              interface=ArgumentInterface(ArgumentInterface.Access.READ)))],
      {"fs": "w1"
def apply(self, node, options=None):
    '''Apply the SIGN intrinsic conversion transformation to the specified
    node. This node must be a SIGN BinaryOperation. The SIGN
    BinaryOperation is converted to equivalent inline code. This is
    implemented as a PSyIR transform from:

    .. code-block:: python

        R = ... SIGN(A, B) ...

    to:

    .. code-block:: python

        tmp_abs = A
        if tmp_abs < 0.0:
            res_abs = tmp_abs*-1.0
        else:
            res_abs = tmp_abs
        res_sign = res_abs
        tmp_sign = B
        if tmp_sign < 0.0:
            res_sign = res_sign*-1.0
        R = ... res_sign ...

    where ``A`` and ``B`` could be arbitrarily complex PSyIR expressions,
    ``...`` could be arbitrary PSyIR code and where ``ABS`` has been
    replaced with inline code by the Abs2CodeTrans transformation.

    This transformation requires the operation node to be a descendent of
    an assignment and will raise an exception if this is not the case.

    :param node: a SIGN BinaryOperation node.
    :type node: :py:class:`psyclone.psyGen.BinaryOperation`
    :param options: a dictionary with options for transformations.
    :type options: dictionary of string:values or None

    '''
    # pylint: disable=too-many-locals
    self.validate(node)

    schedule = node.root
    symbol_table = schedule.symbol_table
    oper_parent = node.parent
    assignment = node.ancestor(Assignment)

    # Create two temporary variables. There is an assumption here
    # that the SIGN Operator returns a PSyIR real type. This might
    # not be what is wanted (e.g. the args might be PSyIR integers),
    # or there may be errors (arguments are of different types)
    # but this can't be checked as we don't have the appropriate
    # methods to query nodes (see #658).
    res_var = symbol_table.new_symbol_name("res_sign")
    res_var_symbol = DataSymbol(res_var, REAL_TYPE)
    symbol_table.add(res_var_symbol)
    tmp_var = symbol_table.new_symbol_name("tmp_sign")
    tmp_var_symbol = DataSymbol(tmp_var, REAL_TYPE)
    symbol_table.add(tmp_var_symbol)

    # Replace operator with a temporary (res_var).
    oper_parent.children[node.position] = Reference(res_var_symbol,
                                                    parent=oper_parent)

    # res_var=ABS(A)
    lhs = Reference(res_var_symbol)
    rhs = UnaryOperation.create(UnaryOperation.Operator.ABS,
                                node.children[0])
    new_assignment = Assignment.create(lhs, rhs)
    new_assignment.parent = assignment.parent
    assignment.parent.children.insert(assignment.position, new_assignment)

    # Replace the ABS intrinsic with inline code.
    abs_trans = Abs2CodeTrans()
    abs_trans.apply(rhs, symbol_table)

    # tmp_var=B
    lhs = Reference(tmp_var_symbol)
    new_assignment = Assignment.create(lhs, node.children[1])
    new_assignment.parent = assignment.parent
    assignment.parent.children.insert(assignment.position, new_assignment)

    # if_condition: tmp_var<0.0
    lhs = Reference(tmp_var_symbol)
    rhs = Literal("0.0", REAL_TYPE)
    if_condition = BinaryOperation.create(BinaryOperation.Operator.LT,
                                          lhs, rhs)

    # then_body: res_var=res_var*-1.0
    lhs = Reference(res_var_symbol)
    lhs_child = Reference(res_var_symbol)
    rhs_child = Literal("-1.0", REAL_TYPE)
    rhs = BinaryOperation.create(BinaryOperation.Operator.MUL, lhs_child,
                                 rhs_child)
    then_body = [Assignment.create(lhs, rhs)]

    # if [if_condition] then [then_body]
    if_stmt = IfBlock.create(if_condition, then_body)
    if_stmt.parent = assignment.parent
    assignment.parent.children.insert(assignment.position, if_stmt)
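# --- Illustrative usage (not part of the original source) ---
# A minimal sketch of applying this transformation. The class name
# Sign2CodeTrans and its import path are assumptions (by analogy with the
# Abs2CodeTrans instantiated above); "schedule" is an assumed PSyIR
# schedule containing SIGN operations:
#
#     sign_trans = Sign2CodeTrans()
#     for oper in schedule.walk(BinaryOperation):
#         if oper.operator == BinaryOperation.Operator.SIGN:
#             sign_trans.apply(oper)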
def apply(self, node, options=None):
    '''Apply this transformation to the supplied node.

    :param node: the node to transform.
    :type node: :py:class:`psyclone.gocean1p0.GOKern`
    :param options: a dictionary with options for transformations.
    :type options: dict of string:values or None

    :returns: 2-tuple of new schedule and memento of transform.
    :rtype: (:py:class:`psyclone.gocean1p0.GOInvokeSchedule`, \
             :py:class:`psyclone.undoredo.Memento`)

    '''
    self.validate(node, options)

    # Get useful references
    invoke_st = node.ancestor(InvokeSchedule).symbol_table
    inner_loop = node.ancestor(Loop)
    outer_loop = inner_loop.ancestor(Loop)
    cursor = outer_loop.position

    # Make sure the boundary symbols in the PSy layer exist
    inv_xstart = invoke_st.symbol_from_tag(
        "xstart_" + node.name, root_name="xstart", symbol_type=DataSymbol,
        datatype=INTEGER_TYPE)
    inv_xstop = invoke_st.symbol_from_tag(
        "xstop_" + node.name, root_name="xstop", symbol_type=DataSymbol,
        datatype=INTEGER_TYPE)
    inv_ystart = invoke_st.symbol_from_tag(
        "ystart_" + node.name, root_name="ystart", symbol_type=DataSymbol,
        datatype=INTEGER_TYPE)
    inv_ystop = invoke_st.symbol_from_tag(
        "ystop_" + node.name, root_name="ystop", symbol_type=DataSymbol,
        datatype=INTEGER_TYPE)

    # If the kernel acts on the whole iteration space, the boundary values
    # are not needed. This also avoids adding duplicated arguments if this
    # transformation is applied more than once to the same kernel. But the
    # declaration and initialisation above still need to exist because the
    # boundary variables are expected to exist by the generation code.
    if (inner_loop.field_space == "go_every" and
            outer_loop.field_space == "go_every" and
            inner_loop.iteration_space == "go_all_pts" and
            outer_loop.iteration_space == "go_all_pts"):
        return node.root, None

    # Initialise the boundary values provided by the Loop construct
    assign1 = Assignment.create(Reference(inv_xstart),
                                inner_loop.lower_bound())
    outer_loop.parent.children.insert(cursor, assign1)
    cursor = cursor + 1
    assign2 = Assignment.create(Reference(inv_xstop),
                                inner_loop.upper_bound())
    outer_loop.parent.children.insert(cursor, assign2)
    cursor = cursor + 1
    assign3 = Assignment.create(Reference(inv_ystart),
                                outer_loop.lower_bound())
    outer_loop.parent.children.insert(cursor, assign3)
    cursor = cursor + 1
    assign4 = Assignment.create(Reference(inv_ystop),
                                outer_loop.upper_bound())
    outer_loop.parent.children.insert(cursor, assign4)

    # Update Kernel Call argument list
    for symbol in [inv_xstart, inv_xstop, inv_ystart, inv_ystop]:
        node.arguments.append(symbol.name, "go_i_scalar")

    # Now that the boundaries are inside the kernel, the looping should go
    # through all the field points
    inner_loop.field_space = "go_every"
    outer_loop.field_space = "go_every"
    inner_loop.iteration_space = "go_all_pts"
    outer_loop.iteration_space = "go_all_pts"

    # Update Kernel
    kschedule = node.get_kernel_schedule()
    kernel_st = kschedule.symbol_table
    iteration_indices = kernel_st.iteration_indices
    data_arguments = kernel_st.data_arguments

    # Create new symbols and insert them as kernel arguments at the end of
    # the kernel argument list
    xstart_symbol = kernel_st.new_symbol(
        "xstart", symbol_type=DataSymbol, datatype=INTEGER_TYPE,
        interface=ArgumentInterface(ArgumentInterface.Access.READ))
    xstop_symbol = kernel_st.new_symbol(
        "xstop", symbol_type=DataSymbol, datatype=INTEGER_TYPE,
        interface=ArgumentInterface(ArgumentInterface.Access.READ))
    ystart_symbol = kernel_st.new_symbol(
        "ystart", symbol_type=DataSymbol, datatype=INTEGER_TYPE,
        interface=ArgumentInterface(ArgumentInterface.Access.READ))
    ystop_symbol = kernel_st.new_symbol(
        "ystop", symbol_type=DataSymbol, datatype=INTEGER_TYPE,
        interface=ArgumentInterface(ArgumentInterface.Access.READ))
    kernel_st.specify_argument_list(
        iteration_indices + data_arguments +
        [xstart_symbol, xstop_symbol, ystart_symbol, ystop_symbol])

    # Create boundary masking conditions
    condition1 = BinaryOperation.create(BinaryOperation.Operator.LT,
                                        Reference(iteration_indices[0]),
                                        Reference(xstart_symbol))
    condition2 = BinaryOperation.create(BinaryOperation.Operator.GT,
                                        Reference(iteration_indices[0]),
                                        Reference(xstop_symbol))
    condition3 = BinaryOperation.create(BinaryOperation.Operator.LT,
                                        Reference(iteration_indices[1]),
                                        Reference(ystart_symbol))
    condition4 = BinaryOperation.create(BinaryOperation.Operator.GT,
                                        Reference(iteration_indices[1]),
                                        Reference(ystop_symbol))

    condition = BinaryOperation.create(
        BinaryOperation.Operator.OR,
        BinaryOperation.create(BinaryOperation.Operator.OR,
                               condition1, condition2),
        BinaryOperation.create(BinaryOperation.Operator.OR,
                               condition3, condition4))

    # Insert the conditional mask as the first statement of the kernel
    if_statement = IfBlock.create(condition, [Return()])
    kschedule.children.insert(0, if_statement)
    if_statement.parent = kschedule

    return node.root, None
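# --- Illustrative usage (not part of the original source) ---
# A minimal sketch, assuming the enclosing class is named
# GOMoveIterationBoundariesInsideKernelTrans (an assumption, not confirmed
# by this excerpt) and that "invoke_schedule" is an existing
# GOInvokeSchedule. The transformation is applied to each kernel call and,
# conceptually, inserts an early-exit mask at the start of the kernel body
# (a return when the point lies outside xstart..xstop / ystart..ystop):
#
#     trans = GOMoveIterationBoundariesInsideKernelTrans()
#     for kern in invoke_schedule.walk(GOKern):
#         trans.apply(kern)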