Example #1
def get_mesh(metadata, valid_mesh_types):
    '''
    Returns the mesh-type described by the supplied meta-data

    :param metadata: node in the parser AST.
    :type metadata: :py:class:`psyclone.expression.NamedArg`
    :param valid_mesh_types: List of valid mesh types
    :type valid_mesh_types: list of strings

    :return: the name of the mesh
    :rtype: string

    :raises ParseError: if the supplied meta-data is not a recognised \
                        mesh identifier.
    :raises ParseError: if the mesh type is unsupported.

    '''
    if not isinstance(metadata, expr.NamedArg) or \
       metadata.name.lower() != "mesh_arg":
        raise ParseError(
            "{0} is not a valid mesh identifier (expected "
            "mesh_arg=MESH_TYPE where MESH_TYPE is one of {1}))".format(
                str(metadata), valid_mesh_types))
    mesh = metadata.value.lower()
    if mesh not in valid_mesh_types:
        raise ParseError("mesh_arg must be one of {0} but got {1}".format(
            valid_mesh_types, mesh))
    return mesh
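# Hedged sketch (not from the PSyclone source): the metadata form get_mesh()
# expects, held as a plain string. The real input is a parsed
# psyclone.expression.NamedArg node, e.g. from 'mesh_arg=GH_COARSE'; the names
# below are assumptions used only for illustration.
mesh_entry = "mesh_arg=GH_COARSE"              # hypothetical inter-grid entry
valid_mesh_types = ["gh_coarse", "gh_fine"]    # assumed valid identifiers
key, _, value = mesh_entry.partition("=")
assert key.lower() == "mesh_arg" and value.lower() in valid_mesh_types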
Example #2
    def __init__(self, ast, name=None):

        if name is None:
            # if no name is supplied then use the module name to
            # determine the type name. The assumed convention is that
            # the module is called <name>_mod and the type is called
            # <name>_type
            found = False
            for statement, _ in fpapi.walk(ast):
                if isinstance(statement, fparser1.block_statements.Module):
                    module_name = statement.name
                    found = True
                    break
            if not found:
                raise ParseError(
                    "Error KernelType, the file does not contain a module. "
                    "Is it a Kernel file?")

            mn_len = len(module_name)
            if mn_len < 5:
                raise ParseError(
                    "Error, module name '{0}' is too short to have '_mod' as "
                    "an extension. This convention is assumed.".format(
                        module_name))
            base_name = module_name.lower()[:mn_len - 4]
            extension_name = module_name.lower()[mn_len - 4:mn_len]
            if extension_name != "_mod":
                raise ParseError(
                    "Error, module name '{0}' does not have '_mod' as an "
                    "extension. This convention is assumed.".format(
                        module_name))
            name = base_name + "_type"

        self._name = name
        self._ast = ast
        self._ktype = get_kernel_metadata(name, ast)
        operates_on = self.get_integer_variable("operates_on")
        # The GOcean API still uses the 'iterates_over' metadata entry
        # although this is deprecated in the LFRic API.
        # Validation is left to the API-specific code in either dynamo0p3.py
        # or gocean1p0.py.
        iterates_over = self.get_integer_variable("iterates_over")
        if operates_on:
            self._iterates_over = operates_on
        elif iterates_over:
            self._iterates_over = iterates_over
        else:
            # We don't raise an error here - we leave it to the API-specific
            # validation code.
            self._iterates_over = None
        # Although validation of the value given to operates_on or
        # iterates_over is API-specific, we can check that the metadata doesn't
        # specify both of them because that doesn't make sense.
        if operates_on and iterates_over:
            raise ParseError("The metadata for kernel '{0}' contains both "
                             "'operates_on' and 'iterates_over'. Only one of "
                             "these is permitted.".format(name))
        self._procedure = KernelProcedure(self._ktype, name, ast)
        self._inits = getkerneldescriptors(name, self._ktype)
        self._arg_descriptors = []  # this is set up by the subclasses
Example #3
 def _validate(self):
     ''' Check that this built-in conforms to the Dynamo 0.3 API '''
     write_count = 0  # Only one argument must be written to
     field_count = 0  # We must have one or more fields as arguments
     spaces = set()   # All field arguments must be on the same space
     for arg in self.arg_descriptors:
         if arg.access in [AccessType.WRITE, AccessType.SUM,
                           AccessType.INC]:
             write_count += 1
         if arg.type == "gh_field":
             field_count += 1
             spaces.add(arg.function_space)
         if arg.type not in VALID_BUILTIN_ARG_TYPES:
             raise ParseError(
                 "In the Dynamo 0.3 API an argument to a built-in kernel "
                 "must be one of {0} but kernel {1} has an argument of "
                 "type {2}".format(VALID_BUILTIN_ARG_TYPES, self.name,
                                   arg.type))
     if write_count != 1:
         raise ParseError("A built-in kernel in the Dynamo 0.3 API must "
                          "have one and only one argument that is written "
                          "to but found {0} for kernel {1}".
                          format(write_count, self.name))
     if field_count == 0:
         raise ParseError("A built-in kernel in the Dynamo 0.3 API "
                          "must have at least one field as an argument but "
                          "kernel {0} has none.".format(self.name))
     if len(spaces) != 1:
         raise ParseError(
             "All field arguments to a built-in in the Dynamo 0.3 API "
             "must be on the same space. However, found spaces {0} for "
             "arguments to {1}".format(sorted(spaces), self.name))
Example #4
    def _validate_vector_size(self, separator, arg_type):
        '''
        Validates descriptors for field vector arguments and populates
        vector properties accordingly.

        :param str separator: operator in a binary expression.
        :param arg_type: LFRic API field (vector) argument type.
        :type arg_type: :py:class:`psyclone.expression.FunctionVar`

        :raises ParseError: if the field vector notation does not use \
                            the '*' operator.
        :raises ParseError: if the field vector notation is not in the \
                            correct format '(field*n)' where 'n' is \
                            an integer.
        :raises ParseError: if the field vector notation is used for a \
                            vector size of less than 2.
        :raises ParseError: if the field vector notation is used for an \
                            argument that is not a field.

        '''
        # Check that the operator is correct
        if separator != "*":
            raise ParseError(
                "In the LFRic API the 1st argument of a 'meta_arg' "
                "entry may be a field vector but if so must use '*' as "
                "the separator in the format 'field*n', but found "
                "'{0}' in '{1}'.".format(separator, arg_type))

        # Now try to find the vector size for a field vector and raise
        # an error if it is not an integer number...
        try:
            vectsize = int(arg_type.args[0].toks[2])
        except TypeError as err:
            six.raise_from(
                ParseError(
                    "In the LFRic API, the field vector notation must be in "
                    "the format 'field*n' where 'n' is an integer, but the "
                    "following '{0}' was found in '{1}'.".format(
                        str(arg_type.args[0].toks[2]), arg_type)), err)
        # ... or it is less than 2 (1 is the default for all fields)...

        const = LFRicConstants()
        if vectsize < 2:
            raise ParseError(
                "In the LFRic API the 1st argument of a 'meta_arg' entry "
                "may be a field vector with format 'field*n' where n is "
                "an integer > 1. However, found n = {0} in '{1}'.".format(
                    vectsize, arg_type))
        # ... and set the vector size if all checks pass
        self._vector_size = vectsize

        # Check that no other arguments than fields use vector notation
        if self._argument_type not in \
           const.VALID_FIELD_NAMES and self._vector_size:
            raise ParseError(
                "In the LFRic API, vector notation is only supported "
                "for {0} argument types but found '{1}'.".format(
                    const.VALID_FIELD_NAMES, arg_type.args[0]))
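# Hedged standalone sketch (plain string handling, not the psyclone.expression
# parser) of the 'field*n' vector notation that _validate_vector_size() above
# enforces; the entry below is a made-up example.
entry = "gh_field*3"                  # hypothetical meta_arg vector entry
field_name, _, size_str = entry.partition("*")
vector_size = int(size_str)           # values < 2 are rejected by the checks above
print(field_name, vector_size)        # -> gh_field 3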
Example #5
    def create_invoke_call(self, statement):
        '''Takes the part of a parse tree containing an invoke call and
        returns an InvokeCall object which captures the required
        information about the invoke.

        :param statement: Parse tree of the invoke call.
        :type statement: :py:class:`fparser.two.Fortran2003.Call_Stmt`
        :returns: An InvokeCall object which contains relevant \
        information about the invoke call.
        :rtype: :py:class:`psyclone.parse.algorithm.InvokeCall`
        :raises ParseError: if more than one invoke argument contains \
        'name=xxx'.
        :raises ParseError: if an unknown or unsupported invoke \
        argument is found.

        '''
        # Extract argument list. This if construct can be removed
        # when fparser#170 is implemented.
        argument_list = []
        if isinstance(statement.items[1], Actual_Arg_Spec_List):
            argument_list = statement.items[1].items
        else:
            # Expecting a single entry rather than a list
            argument_list = [statement.items[1]]

        invoke_label = None
        kernel_calls = []

        for argument in argument_list:

            if isinstance(argument, Actual_Arg_Spec):
                # This should be the invoke label.
                if invoke_label:
                    raise ParseError(
                        "algorithm.py:Parser():create_invoke_call: An invoke "
                        "must contain one or zero 'name=xxx' arguments but "
                        "found more than one in: {0} in file {1}".format(
                            str(statement), self._alg_filename))
                invoke_label = self.check_invoke_label(argument)

            elif isinstance(argument, (Data_Ref, Part_Ref)):
                # This should be a kernel call.
                kernel_call = self.create_kernel_call(argument)
                kernel_calls.append(kernel_call)

            else:
                # Unknown and/or unsupported argument type
                raise ParseError(
                    "algorithm.py:Parser():create_invoke_call: Expecting "
                    "argument to be of the form 'name=xxx' or a "
                    "Kernel call but found '{0}' in file "
                    "'{1}'.".format(argument, self._alg_filename))

        return InvokeCall(kernel_calls, name=invoke_label)
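# Hedged sketch of the algorithm-layer call form that create_invoke_call()
# handles, kept as a plain string here. The kernel and argument names are
# hypothetical; parsing the real call requires fparser2's Fortran2003 classes.
alg_call = "call invoke(name='solver_step', matrix_vector_kernel_type(x, y))"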
Example #6
    def _validate(self):
        '''
        Check that this built-in conforms to the LFRic API.

        :raises ParseError: if a built-in call does not iterate over DoFs.
        :raises ParseError: if an argument to a built-in kernel is not \
                            one of valid argument types.
        :raises ParseError: if a built-in kernel writes to more than \
                            one argument.
        :raises ParseError: if a built-in kernel does not have at least \
                            one field argument.
        :raises ParseError: if the field arguments are not all on the same space.

        '''
        # Check that our assumption that we're looping over DoFs is valid
        if self.iterates_over not in BUILTIN_ITERATION_SPACES:
            raise ParseError("In the LFRic API built-in calls must operate on "
                             "DoFs but found '{0}' for {1}.".format(
                                 self.iterates_over, str(self)))
        # Check write count, field arguments and spaces
        write_count = 0  # Only one argument must be written to
        field_count = 0  # We must have one or more fields as arguments
        spaces = set()  # All field arguments must be on the same space
        for arg in self.arg_descriptors:
            # Built-ins update fields DoF by DoF and therefore can have
            # WRITE/READWRITE access
            if arg.access in [
                    AccessType.WRITE, AccessType.SUM, AccessType.READWRITE
            ]:
                write_count += 1
            if arg.argument_type in LFRicArgDescriptor.VALID_FIELD_NAMES:
                field_count += 1
                spaces.add(arg.function_space)
            if arg.argument_type not in VALID_BUILTIN_ARG_TYPES:
                raise ParseError(
                    "In the LFRic API an argument to a built-in kernel "
                    "must be one of {0} but kernel '{1}' has an argument of "
                    "type '{2}'.".format(VALID_BUILTIN_ARG_TYPES, self.name,
                                         arg.argument_type))
        if write_count != 1:
            raise ParseError("A built-in kernel in the LFRic API must "
                             "have one and only one argument that is written "
                             "to but found {0} for kernel '{1}'.".format(
                                 write_count, self.name))
        if field_count == 0:
            raise ParseError("A built-in kernel in the LFRic API "
                             "must have at least one field as an argument but "
                             "kernel '{0}' has none.".format(self.name))
        if len(spaces) != 1:
            spaces_str = [str(x) for x in sorted(spaces)]
            raise ParseError(
                "All field arguments to a built-in in the LFRic API "
                "must be on the same space. However, found spaces {0} for "
                "arguments to '{1}'".format(spaces_str, self.name))
Example #7
    def create(self, builtin_names, builtin_defs_file, name=None):
        '''Create API-specific information about the builtin metadata. This
        method finds and parses the metadata then makes use of the
        KernelTypeFactory parent class to return the api-specific
        information about the builtin.

        :param builtin_names: a list of valid builtin names
        :type builtin_names: list of str
        :param str builtin_defs_file: the file containing builtin \
        metadata
        :param name: the name of the builtin. Defaults to None if \
        one is not provided.
        :type name: str or NoneType

        :raises ParseError: if the supplied name is not one of the \
        builtin names
        :raises ParseError: if the supplied name is recognised as a \
        builtin but the associated file containing the required \
        metadata can not be found.
        :raises ParseError: if the metadata for the supplied builtin \
        can not be parsed.

        '''
        if name not in builtin_names:
            raise ParseError(
                "BuiltInKernelTypeFactory:create unrecognised built-in name. "
                "Got '{0}' but expected one of {1}".format(name,
                                                           builtin_names))
        # The meta-data for these lives in a Fortran module file
        # passed in to this method.
        fname = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            builtin_defs_file)
        if not os.path.isfile(fname):
            raise ParseError(
                "BuiltInKernelTypeFactory:create Kernel '{0}' is a recognised "
                "Built-in but cannot find file '{1}' containing the meta-data "
                "describing the Built-in operations for API '{2}'"
                .format(name, fname, self._type))
        # Attempt to parse the meta-data
        try:
            parsefortran.FortranParser.cache.clear()
            fparser.logging.disable(fparser.logging.CRITICAL)
            parse_tree = fpapi.parse(fname)
        except Exception:
            raise ParseError(
                "BuiltInKernelTypeFactory:create: Failed to parse the meta-"
                "data for PSyclone built-ins in file '{0}'.".format(fname))

        # Now we have the parse tree, call our parent class to create
        # the object
        return KernelTypeFactory.create(self, parse_tree, name)
Example #8
def get_kernel_metadata(name, ast):
    '''Takes the kernel module parse tree and returns the metadata part
    of the parse tree (a Fortran type) with the name 'name'.

    :param str name: the metadata name (of a Fortran type). Also \
    the name referencing the kernel in the algorithm layer. The name \
    provided and the name of the kernel in the parse tree are case \
    insensitive in this function.
    :param ast: parse tree of the kernel module code
    :type ast: :py:class:`fparser.one.block_statements.BeginSource`

    :returns: Parse tree of the metadata (a Fortran type with name \
    'name')
    :rtype: :py:class:`fparser.one.block_statements.Type`

    :raises ParseError: if the metadata type name is not found in \
    the kernel code parse tree

    '''
    ktype = None
    for statement, _ in fpapi.walk(ast, -1):
        if isinstance(statement, fparser1.block_statements.Type) \
           and statement.name.lower() == name.lower():
            ktype = statement
            break
    if ktype is None:
        raise ParseError("Kernel type {0} does not exist".format(name))
    return ktype
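# Hedged sketch of the kind of kernel module that get_kernel_metadata() walks:
# a module '<name>_mod' containing a Fortran type '<name>_type' that holds the
# metadata. The source below is a trimmed-down, illustrative string only (the
# module/type names are assumptions, and the required 'use' statements are
# omitted); it is not parsed here.
TESTKERN_SRC = '''
module testkern_mod
  type, extends(kernel_type) :: testkern_type
     integer :: operates_on = cell_column
   contains
     procedure, nopass :: code => testkern_code
  end type testkern_type
end module testkern_mod
'''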
Example #9
    def create(self, parse_tree, name=None):
        '''Create API-specific information about the kernel metadata and a
        reference to its code. The API is set when the factory is
        created.

        :param parse_tree: The fparser1 parse tree for the Kernel code.
        :type parse_tree: :py:class:`fparser.one.block_statements.BeginSource`

        :param name: the name of the Kernel. Defaults to None if \
        one is not provided.
        :type name: str or NoneType

        :raises ParseError: if the supplied API is not supported.

        '''
        if self._type == "dynamo0.1":
            from psyclone.dynamo0p1 import DynKernelType
            return DynKernelType(parse_tree, name=name)
        elif self._type == "dynamo0.3":
            from psyclone.dynamo0p3 import DynKernMetadata
            return DynKernMetadata(parse_tree, name=name)
        elif self._type == "gocean0.1":
            from psyclone.gocean0p1 import GOKernelType
            return GOKernelType(parse_tree, name=name)
        elif self._type == "gocean1.0":
            from psyclone.gocean1p0 import GOKernelType1p0
            return GOKernelType1p0(parse_tree, name=name)
        else:
            raise ParseError(
                "KernelTypeFactory:create: Unsupported kernel type '{0}' "
                "found.".format(self._type))
Example #10
def get_kernel_parse_tree(filepath):
    '''Parse the file in filepath with fparser1 and return a parse tree.

    :param str filepath: path to a file (hopefully) containing \
    PSyclone kernel code.

    :returns: Parse tree of the kernel code contained in the specified \
    file.
    :rtype: :py:class:`fparser.one.block_statements.BeginSource`

    :raises ParseError: if fparser fails to parse the file

    '''
    parsefortran.FortranParser.cache.clear()
    fparser.logging.disable(fparser.logging.CRITICAL)
    try:
        parse_tree = fpapi.parse(filepath)
        # parse_tree includes an extra comment line which contains
        # file details. This line can be long which can cause line
        # length issues. Therefore set the information (name) to be
        # empty.
        parse_tree.name = ""
    except Exception:
        raise ParseError("Failed to parse kernel code '{0}'. Is the Fortran "
                         "correct?".format(filepath))
    return parse_tree
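# Hedged usage sketch: parse a kernel file with the function above if one is
# present locally (the file name below is hypothetical).
import os
if os.path.isfile("testkern_mod.F90"):
    tree = get_kernel_parse_tree("testkern_mod.F90")
    print(type(tree).__name__)   # expected: BeginSource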
Example #11
    def create_builtin_kernel_call(self, kernel_name, args):
        '''Takes the builtin kernel name and a list of Arg objects which
        capture information about the builtin call arguments and
        returns a BuiltinCall instance with content specific to the
        particular API (as specified in self._api).

        :param str kernel_name: the name of the builtin kernel being \
        called
        :param args: a list of 'Arg' instances containing the required \
        information for the arguments being passed from the algorithm \
        layer. The list order is the same as the argument order.
        :type args: list of :py:class:`psyclone.parse.algorithm.Arg`
        :returns: a BuiltInCall instance with information specific to \
        the API.
        :rtype: :py:class:`psyclone.parse.algorithm.BuiltInCall`
        :raises ParseError: if the builtin is specified in a use \
        statement in the algorithm layer

        '''
        if kernel_name.lower() in self._arg_name_to_module_name:
            raise ParseError(
                "A built-in cannot be named in a use "
                "statement but '{0}' is used from "
                "module '{1}' in file {2}".format(
                    kernel_name,
                    self._arg_name_to_module_name[kernel_name.lower()],
                    self._alg_filename))

        from psyclone.parse.kernel import BuiltInKernelTypeFactory
        return BuiltInCall(
            BuiltInKernelTypeFactory(api=self._api).create(
                self._builtin_name_map.keys(),
                self._builtin_defs_file,
                name=kernel_name.lower()), args)
Example #12
def generate(filename, api=""):
    '''
    Generates an empty kernel subroutine with the required arguments
    and datatypes (which we call a stub) when presented with Kernel
    Metadata. This is useful for Kernel developers to make sure
    they are using the correct arguments in the correct order.  The
    Kernel Metadata must be presented in the standard Kernel
    format.

    :param str filename: the name of the file from which to create a \
                         kernel stub.
    :param str api: the name of the API for which to create a kernel \
                    stub. Must be one of the supported stub APIs.

    :returns: root of fparser1 parse tree for the stub routine.
    :rtype: :py:class:`fparser.one.block_statements.Module`

    :raises GenerationError: if an invalid stub API is specified.
    :raises IOError: if filename does not specify a file.
    :raises ParseError: if the given file could not be parsed.
    :raises GenerationError: if a kernel stub does not have a supported \
                             iteration space (currently only "cells").

    '''
    if api == "":
        api = Config.get().default_stub_api
    if api not in Config.get().supported_stub_apis:
        raise GenerationError(
            "Kernel stub generator: Unsupported API '{0}' specified. "
            "Supported APIs are {1}.".format(api,
                                             Config.get().supported_stub_apis))

    if not os.path.isfile(filename):
        raise IOError(
            "Kernel stub generator: File '{0}' not found.".format(filename))

    # Drop cache
    fparser.one.parsefortran.FortranParser.cache.clear()
    fparser.logging.disable(fparser.logging.CRITICAL)
    try:
        ast = fparser.api.parse(filename, ignore_comments=False)

    except (fparser.common.utils.AnalyzeError, AttributeError) as error:
        raise ParseError("Kernel stub generator: Code appears to be invalid "
                         "Fortran: {0}.".format(str(error)))

    metadata = DynKernMetadata(ast)
    kernel = DynKern()
    kernel.load_meta(metadata)

    # Check kernel iteration space before generating code
    if (api == "dynamo0.3"
            and kernel.iterates_over not in USER_KERNEL_ITERATION_SPACES):
        raise GenerationError(
            "The LFRic API kernel stub generator supports kernels that operate"
            " on one of {0}, but found '{1}' in kernel '{2}'.".format(
                USER_KERNEL_ITERATION_SPACES, kernel.iterates_over,
                kernel.name))

    return kernel.gen_stub
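# Hedged usage sketch: generate a stub from a kernel metadata file, assuming a
# suitable file exists locally (the path below is hypothetical) and the
# dynamo0.3/LFRic API is configured.
import os
if os.path.isfile("testkern_mod.F90"):
    stub = generate("testkern_mod.F90", api="dynamo0.3")
    print(stub)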
Example #13
    def get_integer_variable(self, name):
        ''' Parse the kernel meta-data and find the value of the
        integer variable with the supplied name. Return None if no
        matching variable is found.

        :param str name: the name of the integer variable to find.

        :returns: value of the specified integer variable or None.
        :rtype: str

        :raises ParseError: if the RHS of the assignment is not a Name.

        '''
        # Ensure the Fortran2003 parser is initialised
        _ = ParserFactory().create()

        for statement, _ in fpapi.walk(self._ktype, -1):
            if isinstance(statement, fparser1.typedecl_statements.Integer):
                # fparser only goes down to the statement level. We use
                # fparser2 to parse the statement itself (eventually we'll
                # use fparser2 to parse the whole thing).
                assign = Fortran2003.Assignment_Stmt(statement.entity_decls[0])
                if str(assign.items[0]) == name:
                    if not isinstance(assign.items[2], Fortran2003.Name):
                        raise ParseError(
                            "get_integer_variable: RHS of assignment is not "
                            "a variable name: '{0}'".format(str(assign)))
                    return str(assign.items[2])
        return None
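# Hedged standalone sketch of the fparser2 re-parse trick used above: feed the
# text of an entity declaration to Assignment_Stmt and read back the name and
# its right-hand side (the declaration text below is a made-up example).
from fparser.two import Fortran2003
from fparser.two.parser import ParserFactory

_ = ParserFactory().create()                 # initialise the Fortran2003 classes
assign = Fortran2003.Assignment_Stmt("operates_on = cell_column")
print(assign.items[0], assign.items[2])      # -> operates_on cell_column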
Example #14
    def reference_accesses(self, var_accesses):
        '''Get all variable access information from this node. The assigned-to
        variable will be set to 'WRITE'.

        :param var_accesses: VariablesAccessInfo instance that stores the \
            information about variable accesses.
        :type var_accesses: \
            :py:class:`psyclone.core.access_info.VariablesAccessInfo`

        '''
        # It is important that a new instance is used to handle the LHS,
        # since a check in 'change_read_to_write' makes sure that there
        # is only one access to the variable!
        accesses_left = VariablesAccessInfo()
        self.lhs.reference_accesses(accesses_left)

        # Now change the (one) access to the assigned variable to be WRITE:
        if isinstance(self.lhs, CodeBlock):
            # TODO #363: Assignment to user defined type, not supported yet.
            # Here an absolute hack to get at least some information out
            # from the AST - though indices are just strings, which will
            # likely cause problems later as well.
            name = str(self.lhs.ast)
            # A regular expression that tries to find the last parenthesis
            # pair in the name ("a(i,j)" --> "(i,j)")
            ind = re.search(r"\([^\(]+\)$", name)
            if ind:
                # Remove the index part of the name
                name = name.replace(ind.group(0), "")
                # The index must be added as a list
                accesses_left.add_access(name, AccessType.WRITE, self,
                                         [ind.group(0)])
            else:
                accesses_left.add_access(name, AccessType.WRITE, self)
        else:
            var_info = accesses_left[self.lhs.name]
            try:
                var_info.change_read_to_write()
            except InternalError:
                # An internal error typically indicates that the same variable
                # is used twice on the LHS, e.g.: g(g(1)) = ... This is not
                # supported in PSyclone.
                from psyclone.parse.utils import ParseError
                raise ParseError(
                    "The variable '{0}' appears more than once "
                    "on the left-hand side of an assignment.".format(
                        self.lhs.name))

        # Merge the data (which now shows WRITE for the variable) with the
        # parameter to this function. It is important that first the
        # RHS is added, so that in statements like 'a=a+1' the read on
        # the RHS comes before the write on the LHS (they have the same
        # location otherwise, but the order is still important)
        self.rhs.reference_accesses(var_accesses)
        var_accesses.merge(accesses_left)
        var_accesses.next_location()
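# Hedged standalone sketch of the regular expression used above to split a
# textual LHS such as "a%b(i,j)" into a base name and its final index part
# (the LHS text below is a made-up example).
import re

lhs_text = "a%b(i,j)"                        # hypothetical CodeBlock text
match = re.search(r"\([^\(]+\)$", lhs_text)
if match:
    print(lhs_text.replace(match.group(0), ""), match.group(0))  # -> a%b (i,j)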
Example #15
 def __init__(self, ast, name=None):
     KernelType.__init__(self, ast, name=name)
     self._arg_descriptors = []
     for init in self._inits:
         if init.name != 'arg':
             raise ParseError(
                 "gocean0p1.py:GOKernelType:__init__: Each meta_arg value "
                 "must be of type 'arg' for the gocean0.1 api, but found "
                 "'{0}'.".format(init.name))
         if len(init.args) != 3:
             raise ParseError(
                 "gocean0p1.py:GOKernelType:__init__: 'arg' type expects "
                 "3 arguments but found {0} in '{1}'".format(
                     str(len(init.args)), init.args))
         access = init.args[0].name
         funcspace = init.args[1].name
         stencil = init.args[2].name
         self._arg_descriptors.append(
             GODescriptor(access, funcspace, stencil))
Example #16
    def __init__(self, ast, name=None):

        if name is None:
            # if no name is supplied then use the module name to
            # determine the type name. The assumed convention is that
            # the module is called <name>_mod and the type is called
            # <name>_type
            found = False
            for statement, _ in fpapi.walk(ast, -1):
                if isinstance(statement, fparser1.block_statements.Module):
                    module_name = statement.name
                    found = True
                    break
            if not found:
                raise ParseError(
                    "Error KernelType, the file does not contain a module. "
                    "Is it a Kernel file?")

            mn_len = len(module_name)
            if mn_len < 5:
                raise ParseError(
                    "Error, module name '{0}' is too short to have '_mod' as "
                    "an extension. This convention is assumed.".format(
                        module_name))
            base_name = module_name.lower()[:mn_len - 4]
            extension_name = module_name.lower()[mn_len - 4:mn_len]
            if extension_name != "_mod":
                raise ParseError(
                    "Error, module name '{0}' does not have '_mod' as an "
                    "extension. This convention is assumed.".format(
                        module_name))
            name = base_name + "_type"

        self._name = name
        self._ast = ast
        self._ktype = get_kernel_metadata(name, ast)
        self._iterates_over = self.get_integer_variable("iterates_over")
        self._procedure = KernelProcedure(self._ktype, name, ast)
        self._inits = self.getkerneldescriptors(self._ktype)
        self._arg_descriptors = []  # this is set up by the subclasses
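# Hedged sketch of the naming convention assumed above: a module called
# '<name>_mod' is expected to declare its metadata in a type '<name>_type'
# (the module name below is a made-up example).
module_name = "testkern_mod"                 # hypothetical kernel module name
assert module_name.lower().endswith("_mod")
print(module_name.lower()[:-4] + "_type")    # -> testkern_type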
Example #17
 def __init__(self, ktype, args):
     self._ktype = ktype
     self._args = args
     if len(self._args) < self._ktype.nargs:
         # we cannot test for equality here as APIs may have extra
         # arguments passed in from the algorithm layer (e.g. 'QR'
         # in dynamo0.3), but we do expect there to be at least the
         # same number of real arguments as arguments specified in
         # the metadata.
         raise ParseError(
             "Kernel '{0}' called from the algorithm layer with an "
             "insufficient number of arguments as specified by the "
             "metadata. Expected at least '{1}' but found '{2}'.".format(
                 self._ktype.name, self._ktype.nargs, len(self._args)))
     self._module_name = None
Example #18
    def get_integer_array(self, name):
        ''' Parse the kernel meta-data and find the values of the
        integer array variable with the supplied name. Returns an empty list
        if no matching variable is found.

        :param str name: the name of the integer array to find.
        :returns: list of values.
        :rtype: list of str.

        :raises InternalError: if we fail to parse the LHS of the array \
                               declaration or the array constructor.
        :raises ParseError: if the RHS of the declaration is not an array \
                            constructor.

        '''
        # Ensure the classes are setup for the Fortran2003 parser
        _ = ParserFactory().create()

        for statement, _ in fpapi.walk(self._ktype, -1):
            if not isinstance(statement, fparser1.typedecl_statements.Integer):
                # This isn't an integer declaration so skip it
                continue
            # fparser only goes down to the statement level. We use fparser2 to
            # parse the statement itself.
            assign = Fortran2003.Assignment_Stmt(statement.entity_decls[0])
            names = walk_ast(assign.items, [Fortran2003.Name])
            if not names:
                raise InternalError(
                    "Unsupported assignment statement: '{0}'".format(
                        str(assign)))
            if str(names[0]) == name:
                # This is the variable declaration we're looking for
                if not isinstance(assign.items[2],
                                  Fortran2003.Array_Constructor):
                    raise ParseError(
                        "get_integer_array: RHS of assignment is not "
                        "an array constructor: '{0}'".format(str(assign)))
                # fparser2 AST for Array_Constructor is:
                # Array_Constructor('[', Ac_Value_List(',', (Name('w0'),
                #                                      Name('w1'))), ']')
                # Construct a list of the names in the array constructor
                names = walk_ast(assign.items[2].items, [Fortran2003.Name])
                if not names:
                    raise InternalError("Failed to parse array constructor: "
                                        "'{0}'".format(str(assign.items[2])))
                return [str(name) for name in names]
        return []
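# Hedged standalone sketch of parsing an array-constructor RHS with fparser2,
# mirroring the walk over Fortran2003.Name nodes above. It assumes a recent
# fparser2 that provides fparser.two.utils.walk; the declaration text is a
# made-up example.
from fparser.two import Fortran2003
from fparser.two.parser import ParserFactory
from fparser.two.utils import walk

_ = ParserFactory().create()
assign = Fortran2003.Assignment_Stmt("meta_funcs = (/ w0, w1 /)")
names = walk(assign.items[2], Fortran2003.Name)
print([str(n) for n in names])   # -> ['w0', 'w1']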
Example #19
 def __init__(self, ast, name=None):
     KernelType.__init__(self, ast, name=name)
     self._arg_descriptors = []
     for init in self._inits:
         if init.name != 'arg_type':
             raise ParseError(
                 "dynamo0p1.py:DynKernelType:__init__: Each meta_arg "
                 "value must be of type 'arg_type' for the "
                 "dynamo0.1 api, but found '{0}'.".format(init.name))
         access = init.args[0].name
         funcspace = init.args[1].name
         stencil = init.args[2].name
         x1 = init.args[3].name
         x2 = init.args[4].name
         x3 = init.args[5].name
         self._arg_descriptors.append(
             DynDescriptor(access, funcspace, stencil, x1, x2, x3))
Example #20
def generate(filename, api=""):

    '''Generates an empty kernel subroutine with the required arguments
       and datatypes (which we call a stub) when presented with Kernel
       Metadata. This is useful for Kernel developers to make sure
       they are using the correct arguments in the correct order.  The
       Kernel Metadata must be presented in the standard Kernel
       format.

       :param str filename: The name of the file from which to create a \
               kernel stub.
       :param str api: The name of the API for which to create a kernel \
              stub. Must be one of the supported stub APIs.

       :raises GenerationError: if an invalid stub API is specified.
       :raises IOError: if filename does not specify a file.
       :raises ParseError: if the given file could not be parsed.
    '''
    if api == "":
        api = Config.get().default_stub_api
    if api not in Config.get().supported_stub_apis:
        print("Unsupported API '{0}' specified. Supported API's are {1}.".
              format(api, Config.get().supported_stub_apis))
        raise GenerationError(
            "generate: Unsupported API '{0}' specified. Supported types are "
            "{1}.".format(api, Config.get().supported_stub_apis))

    if not os.path.isfile(filename):
        raise IOError("file '{0}' not found".format(filename))

    # drop cache
    fparser.one.parsefortran.FortranParser.cache.clear()
    fparser.logging.disable(fparser.logging.CRITICAL)
    try:
        ast = fparser.api.parse(filename, ignore_comments=False)

    except (fparser.common.utils.AnalyzeError, AttributeError) as error:
        raise ParseError("Code appears to be invalid Fortran: " +
                         str(error))

    metadata = DynKernMetadata(ast)
    kernel = DynKern()
    kernel.load_meta(metadata)
    return kernel.gen_stub
Example #21
    def create(call, parent=None):
        '''
        Create the objects needed for a call to the built-in described in
        the call (BuiltInCall) object.

        :param call: details of the call to this built-in in the \
                     Algorithm layer.
        :type call: :py:class:`psyclone.parse.algorithm.BuiltInCall`
        :param parent: the schedule instance to which the built-in call \
                       belongs.
        :type parent: :py:class:`psyclone.dynamo0p3.DynInvokeSchedule`

        :raises ParseError: if the name of the function being called is \
                            not a recognised built-in.

        '''
        if call.func_name not in BUILTIN_MAP:
            raise ParseError(
                "Unrecognised built-in call in LFRic API: found '{0}' but "
                "expected one of {1}.".format(
                    call.func_name, list(BUILTIN_MAP_CAPITALISED.keys())))

        # Use our dictionary to get the correct Python object for
        # this built-in.
        builtin = BUILTIN_MAP[call.func_name]()

        # Create the loop over DoFs
        from psyclone.dynamo0p3 import DynLoop
        dofloop = DynLoop(parent=parent, loop_type=BUILTIN_ITERATION_SPACES[0])

        # Use the call object (created by the parser) to set-up the state
        # of the infrastructure kernel
        builtin.load(call, parent=dofloop)

        # Set-up its state
        dofloop.load(builtin)

        # As it is the innermost loop it has the kernel as a loop_body
        # child.
        dofloop.loop_body.addchild(builtin)
        builtin.parent = dofloop.loop_body

        # Return the outermost loop
        return dofloop
Example #22
def get_kernel_interface(name, ast):
    '''Takes the kernel module parse tree and returns the interface part
    of the parse tree.

    :param str name: The kernel name
    :param ast: parse tree of the kernel module code
    :type ast: :py:class:`fparser.one.block_statements.BeginSource`

    :returns: Name of the interface block and the names of the module \
              procedures (lower case). Or None, None if there is no \
              interface or the interface has no module procedures.
    :rtype: str, list of str

    :raises ParseError: if more than one interface is found.
    '''

    iname = None
    sname = None
    count = 0
    for statement, _ in fpapi.walk(ast):
        if isinstance(statement, fparser1.block_statements.Interface):
            # count the interfaces; there can be only one!
            count = count + 1
            if count >= 2:
                raise ParseError("Module containing kernel {0} has more than "
                                 "one interface, this is forbidden in the "
                                 "LFRic API.".format(name))
            # Check whether the interface assigns one or more module procedures.
            if statement.a.module_procedures:
                iname = statement.name.lower()
                # If implicit interface (no name) set to none as there is no
                # procedure name for PSyclone to use.
                if iname == '':
                    iname = None
                else:
                    sname = [
                        str(sname).lower()
                        for sname in statement.a.module_procedures
                    ]
    return iname, sname
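# Hedged sketch of the kind of named interface block that get_kernel_interface()
# reports on: an interface listing module procedures. The source below is an
# illustrative string only; the interface and procedure names are hypothetical.
INTERFACE_SRC = '''
interface testkern_code
  module procedure testkern_code_r4, testkern_code_r8
end interface testkern_code
'''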
Example #23
    def check_invoke_label(self, argument):
        '''Takes the parse tree of an invoke argument containing an invoke
        label. Raises an exception if this label has already been used
        by another invoke in the same algorithm code. If all is well
        it returns the label as a string.

        :param argument: Parse tree of an invoke argument. This \
        should contain a "name=xxx" argument.
        :type argument: :py:class:`fparser.two.Actual_Arg_Spec`
        :returns: the label as a string.
        :rtype: str
        :raises ParseError: if this label has already been used by \
        another invoke in this algorithm code.

        '''
        invoke_label = get_invoke_label(argument, self._alg_filename)
        if invoke_label in self._unique_invoke_labels:
            raise ParseError("Found multiple named invoke()'s with the same "
                             "label ('{0}') when parsing {1}".format(
                                 invoke_label, self._alg_filename))
        self._unique_invoke_labels.append(invoke_label)
        return invoke_label
Example #24
    def create(call, parent=None):
        ''' Create the objects needed for a call to the built-in
        described in the call (BuiltInCall) object '''

        if call.func_name not in BUILTIN_MAP:
            raise ParseError(
                "Unrecognised built-in call. Found '{0}' but expected "
                "one of '{1}'".format(call.func_name,
                                      list(BUILTIN_MAP_CAPITALISED.keys())))

        # Use our dictionary to get the correct Python object for
        # this built-in.
        builtin = BUILTIN_MAP[call.func_name]()

        # Create the loop over DoFs
        dofloop = DynLoop(parent=parent,
                          loop_type="dofs")

        # Use the call object (created by the parser) to set-up the state
        # of the infrastructure kernel
        builtin.load(call, parent=dofloop)

        # Check that our assumption that we're looping over DoFs is valid
        if builtin.iterates_over != "dofs":
            raise NotImplementedError(
                "In the Dynamo 0.3 API built-in calls must iterate over "
                "DoFs but found {0} for {1}".format(builtin.iterates_over,
                                                    str(builtin)))
        # Set-up its state
        dofloop.load(builtin)

        # As it is the innermost loop it has the kernel as a loop_body
        # child.
        dofloop.loop_body.addchild(builtin)
        builtin.parent = dofloop.loop_body

        # Return the outermost loop
        return dofloop
Example #25
    def create_coded_kernel_call(self, kernel_name, args):
        '''Takes a coded kernel name and a list of Arg objects which
        capture information about the coded call arguments and
        returns a KernelCall instance with content specific to the
        particular API (as specified in self._api).

        :param str kernel_name: the name of the coded kernel being \
        called
        :param args: a list of 'Arg' instances containing the required \
        information for the arguments being passed from the algorithm \
        layer. The list order is the same as the argument order.
        :type args: list of :py:class:`psyclone.parse.algorithm.Arg`
        :returns: a KernelCall instance with information specific to \
        the API.
        :rtype: :py:class:`psyclone.parse.algorithm.KernelCall`
        :raises ParseError: if the kernel is not specified in a use \
        statement in the algorithm layer

        '''
        try:
            module_name = self._arg_name_to_module_name[kernel_name.lower()]
        except KeyError:
            raise ParseError(
                "kernel call '{0}' must either be named "
                "in a use "
                "statement (found {1}) or be a recognised built-in "
                "(one of '{2}' for this API)".format(
                    kernel_name, list(self._arg_name_to_module_name.values()),
                    list(self._builtin_name_map.keys())))

        from psyclone.parse.kernel import get_kernel_ast
        modast = get_kernel_ast(module_name, self._alg_filename,
                                self._kernel_path, self._line_length)
        from psyclone.parse.kernel import KernelTypeFactory
        return KernelCall(
            module_name,
            KernelTypeFactory(api=self._api).create(modast, name=kernel_name),
            args)
Example #26
    def _init_scalar(self, arg_type):
        '''
        Validates metadata descriptors for scalar arguments and
        initialises scalar argument properties accordingly.

        :param arg_type: LFRic API scalar argument type.
        :type arg_type: :py:class:`psyclone.expression.FunctionVar`

        :raises InternalError: if argument type other than a scalar is \
                               passed in.
        :raises ParseError: if there are not exactly 3 metadata arguments.
        :raises InternalError: if a scalar argument has an invalid data type.
        :raises ParseError: if scalar arguments do not have a read-only or \
                            a reduction access.
        :raises ParseError: if a scalar argument that is not a real \
                            scalar has a reduction access.

        '''
        # Check whether something other than a scalar is passed in
        if self._argument_type not in LFRicArgDescriptor.VALID_SCALAR_NAMES:
            raise InternalError(
                "LFRicArgDescriptor._init_scalar(): expected a scalar "
                "argument but got an argument of type '{0}'.".format(
                    arg_type.args[0]))

        # There must be 3 argument descriptors to describe a scalar.
        # TODO in #874: Remove support for the old-style 2 descriptors.
        min_scalar_nargs = 2 + self._offset
        if self._nargs != min_scalar_nargs:
            raise ParseError(
                "In the LFRic API each 'meta_arg' entry must have {0} "
                "arguments if its first argument is 'gh_{{r,i}}scalar', but "
                "found {1} in '{2}'.".format(min_scalar_nargs, self._nargs,
                                             arg_type))

        # Check whether an invalid data type for a scalar argument is passed
        # in. Valid data types for scalars are valid data types in LFRic API.
        # TODO in #874: Remove the support for old-style scalar metadata that
        #               assigns the data type from the scalar name (the 1st
        #               argument).
        #               Note: The main check for the valid scalar data types
        #               will be ParseError in the class constructor and this
        #               scalar init method only needs to check for
        #               InternalError.
        if not self._data_type and self._offset == 0:
            self._data_type = arg_type.args[0].name
            # Translate the old-style argument type into the current one
            self._argument_type = "gh_scalar"
        if (self._data_type not in LFRicArgDescriptor.VALID_SCALAR_DATA_TYPES):
            raise InternalError(
                "LFRicArgDescriptor._init_scalar(): expected one of {0} "
                "as the data type but got '{1}'.".format(
                    LFRicArgDescriptor.VALID_SCALAR_DATA_TYPES,
                    self._data_type))

        # Test allowed accesses for scalars (read_only or reduction)
        scalar_accesses = [AccessType.READ] + \
            AccessType.get_valid_reduction_modes()
        # Convert generic access types to GH_* names for error messages
        api_config = Config.get().api_conf(API)
        rev_access_mapping = api_config.get_reverse_access_mapping()
        if self._access_type not in scalar_accesses:
            api_specific_name = rev_access_mapping[self._access_type]
            valid_reductions = AccessType.get_valid_reduction_names()
            raise ParseError(
                "In the LFRic API scalar arguments must have read-only "
                "('gh_read') or a reduction {0} access but found '{1}' "
                "in '{2}'.".format(valid_reductions, api_specific_name,
                                   arg_type))
        # Reduction access is currently only valid for real scalar arguments
        if self._data_type != "gh_real" and self._access_type in \
           AccessType.get_valid_reduction_modes():
            raise ParseError(
                "In the LFRic API a reduction access '{0}' is only valid "
                "with a real scalar argument, but a scalar argument with "
                "'{1}' data type was found in '{2}'.".format(
                    self._access_type.api_specific_name(), self._data_type,
                    arg_type))

        # Scalars don't have vector size
        self._vector_size = 0
Example #27
    def _init_operator(self, arg_type):
        '''
        Validates metadata descriptors for operator arguments and
        initialises operator argument properties accordingly.

        :param arg_type: LFRic API operator argument type.
        :type arg_type: :py:class:`psyclone.expression.FunctionVar`

        :raises InternalError: if argument type other than an operator is \
                               passed in.
        :raises ParseError: if there are not exactly 4 metadata arguments.
        :raises ParseError: if the function space to- is not one of the \
                            valid function spaces.
        :raises ParseError: if the function space from- is not one of the \
                            valid function spaces.
        :raises ParseError: if the operator argument has an invalid access.

        '''
        # Check whether something other than an operator is passed in
        if self._argument_type not in LFRicArgDescriptor.VALID_OPERATOR_NAMES:
            raise InternalError(
                "LFRicArgDescriptor._init_operator(): expected an "
                "operator argument but got an argument of type '{0}'.".format(
                    self._argument_type))

        # We expect 4 arguments with the 3rd and 4th each being a
        # function space
        if self._nargs != 4:
            raise ParseError(
                "In the LFRic API each 'meta_arg' entry must have 4 "
                "arguments if its first argument is an operator (one "
                "of {0}), but found {1} in '{2}'.".format(
                    LFRicArgDescriptor.VALID_OPERATOR_NAMES, self._nargs,
                    arg_type))

        # Operator data_type is "gh_real" for now, but will be determined by
        # metadata descriptor as the second argument in issue #817
        self._data_type = "gh_real"

        # Operator arguments need to have valid to- and from- function spaces
        if arg_type.args[2].name not in \
           FunctionSpace.VALID_FUNCTION_SPACE_NAMES:
            raise ParseError(
                "In the LFRic API the 3rd argument of a 'meta_arg' "
                "operator entry must be a valid function space name (one of "
                "{0}), but found '{1}' in '{2}'.".format(
                    FunctionSpace.VALID_FUNCTION_SPACE_NAMES,
                    arg_type.args[2].name, arg_type))
        self._function_space1 = arg_type.args[2].name
        if arg_type.args[3].name not in \
           FunctionSpace.VALID_FUNCTION_SPACE_NAMES:
            raise ParseError(
                "In the LFRic API the 4th argument of a 'meta_arg' "
                "operator entry must be a valid function space name (one "
                "of {0}), but found '{1}' in '{2}'.".format(
                    FunctionSpace.VALID_FUNCTION_SPACE_NAMES,
                    arg_type.args[3].name, arg_type))
        self._function_space2 = arg_type.args[3].name

        # Test allowed accesses for operators
        operator_accesses = [
            AccessType.READ, AccessType.WRITE, AccessType.READWRITE
        ]
        # Convert generic access types to GH_* names for error messages
        api_config = Config.get().api_conf(API)
        rev_access_mapping = api_config.get_reverse_access_mapping()
        op_acc_msg = [rev_access_mapping[acc] for acc in operator_accesses]
        if self._access_type not in operator_accesses:
            raise ParseError(
                "In the LFRic API, allowed accesses for operators are {0} "
                "because they behave as discontinuous quantities, but found "
                "'{1}' in '{2}'.".format(op_acc_msg,
                                         rev_access_mapping[self._access_type],
                                         arg_type))
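# Hedged sketch of an operator meta_arg entry of the 4-argument form validated
# by _init_operator() above: (argument type, access, to-space, from-space). The
# entry below is illustrative only; real metadata lives in the kernel's Fortran
# type declaration.
operator_entry = "arg_type(gh_operator, gh_readwrite, w3, w3)"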
Example #28
    def _init_field(self, arg_type, operates_on):
        '''
        Validates metadata descriptors for field arguments and
        initialises field argument properties accordingly.

        :param arg_type: LFRic API field (vector) argument type.
        :type arg_type: :py:class:`psyclone.expression.FunctionVar`
        :param operates_on: value of operates_on from the parsed kernel \
                            metadata (used for validation).
        :type operates_on: str

        :raises InternalError: if argument type other than a field is \
                               passed in.
        :raises ParseError: if there are fewer than 3 metadata arguments.
        :raises ParseError: if there are more than 4 metadata arguments.
        :raises ParseError: if the 3rd argument is not a valid function space.
        :raises ParseError: if the optional 4th argument is not a stencil \
                            specification or a mesh identifier (for \
                            inter-grid kernels).
        :raises ParseError: if a field passed to a kernel that operates on \
                            DoFs does not have a valid access \
                            (one of [READ, WRITE, READWRITE]).
        :raises ParseError: if a field on a discontinuous function space \
                            passed to a kernel that operates on cell-columns \
                            does not have a valid access (one of \
                            [READ, WRITE, READWRITE]).
        :raises ParseError: if a field on a continuous function space \
                            passed to a kernel that operates on cell-columns \
                            does not have a valid access (one of [READ, INC]).
        :raises InternalError: if an invalid value for operates_on is \
                               passed in.
        :raises ParseError: if a field with a stencil access is not read-only.

        '''
        # Check whether something other than a field is passed in
        if self._argument_type not in LFRicArgDescriptor.VALID_FIELD_NAMES:
            raise InternalError(
                "LFRicArgDescriptor._init_field(): expected a field "
                "argument but got an argument of type '{0}'.".format(
                    arg_type.args[0]))

        # There must be at least 3 arguments
        if self._nargs < 3:
            raise ParseError(
                "In the LFRic API each 'meta_arg' entry must have at "
                "least 3 arguments if its first argument is of {0} type, "
                "but found {1} in '{2}'.".format(
                    LFRicArgDescriptor.VALID_FIELD_NAMES, self._nargs,
                    arg_type))
        # There must be at most 4 arguments
        if self._nargs > 4:
            raise ParseError(
                "In the LFRic API each 'meta_arg' entry must have at "
                "most 4 arguments if its first argument is of {0} type, "
                "but found {1} in '{2}'.".format(
                    LFRicArgDescriptor.VALID_FIELD_NAMES, self._nargs,
                    arg_type))

        # Field data_type is "gh_real" for now, but will be determined by
        # metadata descriptor as the second argument in issue #817
        self._data_type = "gh_real"

        # The 3rd argument must be a valid function space name
        if arg_type.args[2].name not in \
           FunctionSpace.VALID_FUNCTION_SPACE_NAMES:
            raise ParseError(
                "In the LFRic API the 3rd argument of a 'meta_arg' "
                "entry must be a valid function space name (one of {0}) if "
                "its first argument is of {1} type, but found '{2}' in "
                "'{3}'.".format(FunctionSpace.VALID_FUNCTION_SPACE_NAMES,
                                LFRicArgDescriptor.VALID_FIELD_NAMES,
                                arg_type.args[2].name, arg_type))
        self._function_space1 = arg_type.args[2].name

        # The optional 4th argument is either a stencil specification
        # or a mesh identifier (for inter-grid kernels)
        if self._nargs == 4:
            try:
                if "stencil" in str(arg_type.args[3]):
                    self._stencil = get_stencil(
                        arg_type.args[3],
                        LFRicArgDescriptor.VALID_STENCIL_TYPES)
                elif "mesh" in str(arg_type.args[3]):
                    self._mesh = get_mesh(arg_type.args[3],
                                          LFRicArgDescriptor.VALID_MESH_TYPES)
                else:
                    raise ParseError("Unrecognised metadata entry")
            except ParseError as err:
                raise ParseError(
                    "In the LFRic API the 4th argument of a 'meta_arg' "
                    "field entry must be either a valid stencil specification"
                    "or a mesh identifier (for inter-grid kernels). However, "
                    "entry '{0}' raised the following error: {1}.".format(
                        arg_type, str(err)))

        # Test allowed accesses for fields
        field_disc_accesses = [
            AccessType.READ, AccessType.WRITE, AccessType.READWRITE
        ]
        field_cont_accesses = [AccessType.READ, AccessType.INC]
        # Convert generic access types to GH_* names for error messages
        api_config = Config.get().api_conf(API)
        rev_access_mapping = api_config.get_reverse_access_mapping()
        # Create a list of allowed accesses for use in error messages
        fld_disc_acc_msg = [
            rev_access_mapping[acc] for acc in field_disc_accesses
        ]
        fld_cont_acc_msg = [
            rev_access_mapping[acc] for acc in field_cont_accesses
        ]
        # Combined list of valid function spaces for continuous fields
        fld_cont_spaces = (FunctionSpace.CONTINUOUS_FUNCTION_SPACES +
                           FunctionSpace.VALID_ANY_SPACE_NAMES)

        # Check accesses for kernels that operate on DoFs
        if operates_on == "dof":
            if self._access_type not in field_disc_accesses:
                raise ParseError(
                    "In the LFRic API, allowed field accesses for a "
                    "kernel that operates on DoFs are {0}, but found "
                    "'{1}' for '{2}' in '{3}'.".format(
                        fld_disc_acc_msg,
                        rev_access_mapping[self._access_type],
                        self._function_space1.lower(), arg_type))
        # Check accesses for kernels that operate on cell-columns
        elif operates_on == "cell_column":
            # Fields on discontinuous function spaces
            if (self._function_space1.lower()
                    in FunctionSpace.VALID_DISCONTINUOUS_NAMES
                    and self._access_type not in field_disc_accesses):
                raise ParseError(
                    "In the LFRic API, allowed accesses for fields on "
                    "discontinuous function spaces that are arguments to "
                    "kernels that operate on cell-columns are {0}, but found "
                    "'{1}' for '{2}' in '{3}'.".format(
                        fld_disc_acc_msg,
                        rev_access_mapping[self._access_type],
                        self._function_space1.lower(), arg_type))
            # Fields on continuous function spaces
            if (self._function_space1.lower() in fld_cont_spaces
                    and self._access_type not in field_cont_accesses):
                raise ParseError(
                    "In the LFRic API, allowed accesses for fields on "
                    "continuous function spaces that are arguments to "
                    "kernels that operate on cell-columns are {0}, but found "
                    "'{1}' for '{2}' in '{3}'.".format(
                        fld_cont_acc_msg,
                        rev_access_mapping[self._access_type],
                        self._function_space1.lower(), arg_type))
        # Raise an InternalError for an invalid value of operates_on
        else:
            from psyclone.dynamo0p3 import VALID_ITERATION_SPACES
            raise InternalError(
                "Invalid operates_on '{0}' in the kernel metadata (expected "
                "one of {1}).".format(operates_on, VALID_ITERATION_SPACES))

        # Test allowed accesses for fields that have stencil specification
        if self._stencil and self._access_type != AccessType.READ:
            raise ParseError(
                "In the LFRic API a field with a stencil access must be "
                "read-only ('{0}'), but found '{1}' in '{2}'.".format(
                    rev_access_mapping[AccessType.READ],
                    rev_access_mapping[self._access_type], arg_type))
Example #29
0
    def __init__(self, arg_type, operates_on):
        self._arg_type = arg_type
        # Initialise properties
        self._argument_type = None
        self._data_type = None
        self._function_space_to = None
        self._function_space_from = None
        self._function_space = None
        self._function_spaces = []
        # Set vector size to 1 (scalars set it to 0 in their validation)
        self._vector_size = 1
        # Initialise other internal arguments
        self._access_type = None
        self._function_space1 = None
        self._function_space2 = None
        self._stencil = None
        self._mesh = None
        self._nargs = 0
        # Initialise temporary "offset" internal argument required
        # to support the old and the current argument metadata style.
        # TODO in #874: Remove support the for the old-style metadata
        #               as well as this temporary argument.
        self._offset = 0

        # Check for correct type descriptor
        if arg_type.name != 'arg_type':
            raise ParseError(
                "In the LFRic API each 'meta_arg' entry must be of type "
                "'arg_type', but found '{0}'.".format(arg_type.name))

        # Check the first argument descriptor. If it is a binary operator
        # then it has to be a field vector with an "*n" appended where "*"
        # is a binary operator and "n > 1" is a vector size. If it is a
        # variable then it can be one of the other allowed argument types.
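        # For example (illustrative only): a field-vector entry written as
        # 'gh_field*3' parses to a BinaryOperator whose toks hold the
        # argument name ('gh_field') and the separator ('*').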
        argtype = None
        separator = None
        if isinstance(arg_type.args[0], expr.BinaryOperator):
            argtype = arg_type.args[0].toks[0]
            separator = arg_type.args[0].toks[1]
        else:
            argtype = arg_type.args[0]

        # First check for a valid argument type. It has to be a variable
        # (FunctionVar expression) and have a valid LFRic API argument name.
        if isinstance(argtype, expr.FunctionVar) and argtype.name in \
           LFRicArgDescriptor.VALID_ARG_TYPE_NAMES:
            self._argument_type = argtype.name
        else:
            raise ParseError(
                "In the LFRic API the 1st argument of a 'meta_arg' "
                "entry should be a valid argument type (one of {0}), "
                "but found '{1}' in '{2}'.".format(
                    LFRicArgDescriptor.VALID_ARG_TYPE_NAMES, argtype,
                    arg_type))

        # Check for a valid vector size in case of a binary
        # operator expression
        if separator:
            self._validate_vector_size(separator, arg_type)

        # The 2nd arg for scalars (1st for the old-style scalar metadata)
        # is the Fortran primitive type of their data.
        # TODO in issue #817: introduce data type for fields and operators,
        # too, and modify the ParseError accordingly.
        # Note: Here we also set the internal "offset" argument required to
        #       support both the old and the current argument metadata styles.
        # TODO in #874: Remove support for the old-style metadata.
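        # Illustrative examples (for orientation only) of a read-only real
        # scalar entry in the two styles described above:
        #   current style: arg_type(GH_SCALAR, GH_REAL, GH_READ)
        #   old style:     arg_type(GH_REAL, GH_READ)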
        if self._argument_type == "gh_scalar":
            dtype = arg_type.args[1].name
            if dtype in LFRicArgDescriptor.VALID_ARG_DATA_TYPES:
                self._data_type = dtype
                self._offset = 1
            else:
                raise ParseError(
                    "In the LFRic API the 2nd argument of a 'meta_arg' "
                    "scalar entry should be a valid data type (one of {0}), "
                    "but found '{1}' in '{2}'.".format(
                        LFRicArgDescriptor.VALID_ARG_DATA_TYPES, dtype,
                        self._argument_type))

        # Check number of args (in general and also for scalar arguments).
        # We require at least three (two for old-style metadata).
        # TODO in issue #874: Remove the offset and move this check back to
        #                     just below the check for the correct 'arg_type'
        #                     descriptor name.
        self._nargs = len(arg_type.args)
        min_nargs = 2 + self._offset
        if self._nargs < min_nargs:
            raise ParseError(
                "In the LFRic API each 'meta_arg' entry must have at least "
                "{0} args, but found {1} in '{2}'.".format(
                    min_nargs, self._nargs, arg_type))

        # The 3rd arg for scalars and 2nd arg for fields and operators is an
        # access descriptor (issue #817 will make the access descriptor a 3rd
        # argument for the fields and operators, too). Permitted accesses for
        # each argument type are dealt with in the related _validate methods.
        # Convert from GH_* names to the generic access type
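        # (e.g., for illustration, the default LFRic configuration maps the
        # API-specific name 'gh_read' to the generic AccessType.READ)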
        api_config = Config.get().api_conf(API)
        access_mapping = api_config.get_access_mapping()
        prop_ind = 1 + self._offset
        try:
            self._access_type = access_mapping[arg_type.args[prop_ind].name]
        except KeyError:
            valid_names = api_config.get_valid_accesses_api()
            raise ParseError(
                "In the LFRic API argument {0} of a 'meta_arg' entry "
                "must be a valid access descriptor (one of {1}), but found "
                "'{2}' in '{3}'.".format(prop_ind + 1, valid_names,
                                         arg_type.args[prop_ind].name,
                                         arg_type))

        # Check for the allowed iteration spaces from the parsed kernel
        # metadata
        from psyclone.dynamo0p3 import VALID_ITERATION_SPACES
        if operates_on not in VALID_ITERATION_SPACES:
            raise InternalError(
                "Expected operates_on in the kernel metadata to be one of "
                "{0} but got '{1}'.".format(VALID_ITERATION_SPACES,
                                            operates_on))

        # FIELD, OPERATOR and SCALAR argument type descriptors and checks
        if self._argument_type in LFRicArgDescriptor.VALID_FIELD_NAMES:
            # Validate field arguments
            self._init_field(arg_type, operates_on)

        elif self._argument_type in LFRicArgDescriptor.VALID_OPERATOR_NAMES:
            # Validate operator arguments
            self._init_operator(arg_type)

        elif self._argument_type in LFRicArgDescriptor.VALID_SCALAR_NAMES:
            # Validate scalar arguments
            self._init_scalar(arg_type)

        else:
            # We should never get to here if the checks are tight enough
            raise InternalError(
                "LFRicArgDescriptor.__init__(): failed argument validation "
                "for the 'meta_arg' entry '{0}', should not get to here.".
                format(arg_type))

        # Initialise the parent class
        super(LFRicArgDescriptor,
              self).__init__(self._access_type,
                             self._function_space1,
                             stencil=self._stencil,
                             mesh=self._mesh,
                             argument_type=self._argument_type)
Example #30
0
def get_invoke_label(parse_tree, alg_filename, identifier="name"):
    '''Takes an invoke argument contained in the parse_tree argument and
    returns the label specified within it.

    :param parse_tree: Parse tree of an invoke argument. This should \
    contain a "name=xxx" argument.
    :type parse_tree: :py:class:`fparser.two.Actual_Arg_Spec`
    :param str alg_filename: The file containing the algorithm code.
    :param str identifier: An optional string specifying the name \
    used to specify a named argument. Defaults to 'name'.
    :returns: the label as a string.
    :rtype: str
    :raises InternalError: if the form of the argument is not what was \
    expected.
    :raises InternalError: if the number of items contained in the \
    argument is not what was expected.
    :raises ParseError: if the name used for the named argument does \
    not match what was expected.
    :raises ParseError: if the label is not specified as a string.
    :raises ParseError: if the label is not a valid Fortran name \
    (after any white space has been replaced with '_').

    '''
    if not isinstance(parse_tree, Actual_Arg_Spec):
        raise InternalError(
            "algorithm.py:Parser:get_invoke_label: Expected a Fortran "
            "argument of the form name=xxx but found instance of "
            "'{0}'.".format(type(parse_tree)))

    if len(parse_tree.items) != 2:
        raise InternalError(
            "algorithm.py:Parser:get_invoke_label: Expected the Fortran "
            "argument to have two items but found "
            "'{0}'.".format(len(parse_tree.items)))

    ident = str(parse_tree.items[0])
    if ident.lower() != identifier.lower():
        raise ParseError(
            "algorithm.py:Parser:get_invoke_label Expected named identifier "
            "to be '{0}' but found '{1}'".format(identifier.lower(),
                                                 ident.lower()))

    if not isinstance(parse_tree.items[1], Char_Literal_Constant):
        raise ParseError(
            "algorithm.py:Parser:get_invoke_label The (optional) name of an "
            "invoke must be specified as a string, but found {0} in "
            "{1}".format(str(parse_tree.items[1]), alg_filename))

    invoke_label = parse_tree.items[1].items[0]
    invoke_label = invoke_label.lower()
    invoke_label = invoke_label.strip()
    if invoke_label[0] == '"' and invoke_label[-1] == '"' or \
       invoke_label[0] == "'" and invoke_label[-1] == "'":
        invoke_label = invoke_label[1:-1]
    invoke_label = invoke_label.replace(" ", "_")

    if not pattern_tools.abs_name.match(invoke_label):
        raise ParseError(
            "algorithm.py:Parser:get_invoke_label the (optional) name of an "
            "invoke must be a string containing a valid Fortran name (with "
            "any spaces replaced by underscores) but got '{0}' in file "
            "{1}".format(invoke_label, alg_filename))

    return invoke_label
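
# A minimal usage sketch (an illustration only, not part of the example above;
# it assumes fparser2 can build an Actual_Arg_Spec node directly from source
# text, as fparser2 Fortran2003 classes generally can):
#
#   from fparser.two.Fortran2003 import Actual_Arg_Spec
#   arg = Actual_Arg_Spec("name='jacobi solve'")
#   label = get_invoke_label(arg, "alg.f90")
#   # label is now 'jacobi_solve' (quotes stripped, spaces -> underscores)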