Example #1
0
 def requires_gauss_quad(self):
     ''' Returns true if the metadata for this argument specifies that
         its gaussian quadrature values should be passed into the
         routine. '''
     if self._arg.gauss_quad.lower() == ".true.":
         return True
     if self._arg.gauss_quad.lower() == ".false.":
         return False
     raise GenerationError("error: gaussian quadrature is not set to "
                           ".true. or .false.")
Example #2
0
 def requires_diff_basis(self):
     ''' Returns true if the metadata for this argument specifies that
         its differential basis function values should be passed into
         the routine. '''
     if self._arg.diff_basis.lower() == ".true.":
         return True
     if self._arg.diff_basis.lower() == ".false.":
         return False
     raise GenerationError("error: diff_basis is not set to .true. "
                           "or .false.")
Example #3
0
    def _upper_bound(self):
        ''' Returns the upper bound of this loop as a string '''
        schedule = self.ancestor(GOSchedule)
        if schedule.const_loop_bounds:
            index_offset = ""
            # Look for a child kernel in order to get the index offset.
            # We have no guarantee what state our object is in when this
            # method is called, so we allow for the case where we don't
            # have any child kernels yet.
            go_kernels = self.walk(self.children, GOKern)
            if go_kernels:
                index_offset = go_kernels[0].index_offset

            if self._loop_type == "inner":
                stop = schedule.iloop_stop
            else:
                stop = schedule.jloop_stop

            if index_offset:
                stop += (self._bounds_lookup[index_offset][self.field_space][
                    self._iteration_space][self._loop_type]["stop"])
            else:
                stop = "not yet set"
        else:
            if self.field_space == "every":
                # Bounds are independent of the grid-offset convention in
                # use. We look up the upper bound by enquiring about the
                # SIZE of the array itself.
                if self._loop_type == "inner":
                    stop = "SIZE(" + self.field_name + "%data, 1)"
                elif self._loop_type == "outer":
                    stop = "SIZE(" + self.field_name + "%data, 2)"

            else:
                # loop bounds are pulled from the field object which
                # is more straightforward for us but provides the
                # Fortran compiler with less information.
                stop = self.field_name

                if self._iteration_space.lower() == "internal_pts":
                    stop += "%internal"
                elif self._iteration_space.lower() == "all_pts":
                    stop += "%whole"
                else:
                    raise GenerationError(
                        "Unrecognised iteration space, {0}. "
                        "Cannot generate loop bounds.".format(
                            self._iteration_space))
                if self._loop_type == "inner":
                    stop += "%xstop"
                elif self._loop_type == "outer":
                    stop += "%ystop"
        return stop
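
For illustration, the constant-bounds branch above concatenates the schedule's loop-stop variable with an offset string taken from the nested _bounds_lookup table (built in Example #12). A minimal sketch, assuming the "istop"/"jstop" names implied by iloop_stop/jloop_stop (see Example #5) and the NE-offset entries shown later:

# Hedged sketch (not PSyclone source) of how the constant-bounds branch
# assembles an upper-bound string such as "jstop+1".
bounds_lookup = {
    'offset_ne': {'cu': {'all_pts': {'inner': {'start': "1", 'stop': ""},
                                     'outer': {'start': "1", 'stop': "+1"}}}}}


def upper_bound_sketch(loop_type, index_offset="offset_ne",
                       field_space="cu", iteration_space="all_pts"):
    ''' Mimics the constant-bounds look-up performed by _upper_bound(). '''
    stop = "istop" if loop_type == "inner" else "jstop"
    stop += bounds_lookup[index_offset][field_space][
        iteration_space][loop_type]["stop"]
    return stop


print(upper_bound_sketch("outer"))  # -> "jstop+1"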
Example #4
0
    def __init__(self, arg):

        if arg.grid_prop in GRID_PROPERTY_DICT:
            self._name = GRID_PROPERTY_DICT[arg.grid_prop]
        else:
            raise GenerationError("Unrecognised grid property specified. "
                                  "Expected one of {0} but found '{1}'".format(
                                      str(GRID_PROPERTY_DICT.keys()),
                                      arg.grid_prop))

        # This object always represents an argument that is a grid_property
        self._type = "grid_property"
Example #5
0
    def jloop_stop(self):
        '''Returns the variable name to use for the upper bound of outer
        loops if we're generating loops with constant bounds. Raises
        an error if constant bounds are not being used.

        '''
        if self._const_loop_bounds:
            return "jstop"
        else:
            raise GenerationError(
                "Refusing to supply name of outer loop upper bound "
                "because constant loop bounds are not being used.")
Example #6
0
def generate(filename, api=""):

    '''Generates an empty kernel subroutine with the required arguments
       and datatypes (which we call a stub) when presented with Kernel
       Metadata. This is useful for Kernel developers to make sure
       they are using the correct arguments in the correct order.  The
       Kernel Metadata must be presented in the standard Kernel
       format.

       :param str filename: The name of the file from which to create a \
               kernel stub.
       :param str api: The name of the API for which to create a kernel \
              stub. Must be one of the supported stub APIs.

       :raise GenerationError: if an invalid stub API is specified.
       :raise IOError: if filename does not specify a file.
       :raise ParseError: if the given file could not be parsed.
    '''
    if api == "":
        api = Config.get().default_stub_api
    if api not in Config.get().supported_stub_apis:
        print("Unsupported API '{0}' specified. Supported API's are {1}.".
              format(api, Config.get().supported_stub_apis))
        raise GenerationError(
            "generate: Unsupported API '{0}' specified. Supported types are "
            "{1}.".format(api, Config.get().supported_stub_apis))

    if not os.path.isfile(filename):
        raise IOError("file '{0}' not found".format(filename))

    # Clear the fparser1 parser cache to ensure a fresh parse
    fparser.one.parsefortran.FortranParser.cache.clear()
    fparser.logging.disable(fparser.logging.CRITICAL)
    try:
        ast = fparser.api.parse(filename, ignore_comments=False)

    except (fparser.common.utils.AnalyzeError, AttributeError) as error:
        raise ParseError("Code appears to be invalid Fortran: " +
                         str(error))

    metadata = DynKernMetadata(ast)
    kernel = DynKern()
    kernel.load_meta(metadata)
    return kernel.gen_stub
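
A possible call pattern for this stub generator; the module path, kernel file name and API string below are illustrative assumptions, not confirmed by the source:

# Hedged usage sketch: create a kernel stub from kernel metadata and
# print the generated Fortran. Module path and arguments are assumptions.
from psyclone.gen_kernel_stub import generate

stub = generate("my_kernel_mod.F90", api="dynamo0.3")
print(str(stub))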
Example #7
0
    def set_options(options):
        '''Sets the options requested by the user.
        :param options: List of options selected by the user, or None to
                        disable all automatic profiling.
        :type options: List of strings.
        :raises GenerationError: If any option is not KERNELS or INVOKES.
        '''
        # Test that all options are valid
        if options is None:
            options = []  # Makes it easier to test
        for index, option in enumerate(options):
            if option not in [Profiler.INVOKES, Profiler.KERNELS]:
                # Create a 'nice' representation of the allowed options.
                # [1:-1] cuts out the '[' and ']' surrounding the string
                # representation of the list.
                allowed_options = str(Profiler.SUPPORTED_OPTIONS)[1:-1]
                raise GenerationError("Error in Profiler.setOptions: options "
                                      "must be one of {0} but found '{1}' "
                                      "at {2}".format(allowed_options,
                                                      str(option), index))

        # Store options so they can be queried later
        Profiler._options = options
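
A hedged usage sketch for set_options(); the Profiler import path is an assumption, while the INVOKES/KERNELS constants are those checked in the code above:

# Hedged usage sketch: configure automatic profiling.
from psyclone.profiler import Profiler

Profiler.set_options([Profiler.INVOKES])   # profile every invoke
Profiler.set_options(None)                 # disable automatic profiling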
Example #8
0
def handle_script(script_name, psy):
    '''Loads and applies the specified script to the given psy layer.
    The 'trans' function of the script is called with psy as its argument.
    :param script_name: Name of the script to load.
    :type script_name: string
    :param psy: The psy layer to which the script is applied.
    :type psy: :py:class:`psyclone.psyGen.PSy`
    :raises IOError: If the file is not found.
    :raises GenerationError: if the file does not have a .py extension
        or cannot be imported.
    :raises GenerationError: if trans() cannot be called.
    :raises GenerationError: if any exception is raised when trans()
        is called.
    '''
    sys_path_appended = False
    try:
        # a script has been provided
        filepath, filename = os.path.split(script_name)
        if filepath:
            # a path to a file has been provided
            # we need to check the file exists
            if not os.path.isfile(script_name):
                raise IOError(
                    "script file '{0}' not found".format(script_name))
            # it exists so we need to add the path to the python
            # search path
            sys_path_appended = True
            sys.path.append(filepath)
        filename, fileext = os.path.splitext(filename)
        if fileext != '.py':
            raise GenerationError(
                "generator: expected the script file '{0}' to have "
                "the '.py' extension".format(filename))
        try:
            transmod = __import__(filename)
        except ImportError:
            raise GenerationError(
                "generator: attempted to import '{0}' but script file "
                "'{1}' has not been found".format(filename, script_name))
        except SyntaxError:
            raise GenerationError(
                "generator: attempted to import '{0}' but script file "
                "'{1}' is not valid python".format(filename, script_name))
        if callable(getattr(transmod, 'trans', None)):
            try:
                psy = transmod.trans(psy)
            except Exception:
                exc_type, exc_value, exc_traceback = sys.exc_info()
                lines = traceback.format_exception(exc_type, exc_value,
                                                   exc_traceback)
                e_str = '{\n' +\
                    ''.join('    ' + line for line in lines[2:]) + '}'
                raise GenerationError(
                    "Generator: script file '{0}'\nraised the "
                    "following exception during execution "
                    "...\n{1}\nPlease check your script".format(
                        script_name, e_str))
        else:
            raise GenerationError(
                "generator: attempted to import '{0}' but script file "
                "'{1}' does not contain a 'trans()' function".format(
                    filename, script_name))
    except Exception:
        if sys_path_appended:
            os.sys.path.pop()
        raise
    if sys_path_appended:
        os.sys.path.pop()
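
The script contract implied above is a .py file that provides a callable trans(psy) and returns the (possibly modified) psy object. A minimal sketch of such a script; the body and the attribute names it touches are assumptions:

# optimise.py -- hedged sketch of a script accepted by handle_script().
# Only the trans(psy) signature is required by the code above.
def trans(psy):
    for invoke in psy.invokes.invoke_list:   # attribute names assumed
        print("Transforming invoke: " + invoke.name)
    return psy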
Example #9
0
def generate(filename,
             api="",
             kernel_path="",
             script_name=None,
             line_length=False,
             distributed_memory=None,
             kern_out_path="",
             kern_naming="multiple"):
    # pylint: disable=too-many-arguments
    '''Takes a PSyclone algorithm specification as input and outputs the
    associated generated algorithm and psy codes suitable for
    compiling with the specified kernel(s) and support
    infrastructure. Uses the :func:`parse.algorithm.parse` function to
    parse the algorithm specification, the :class:`psyGen.PSy` class
    to generate the PSy code and the :class:`alg_gen.Alg` class to
    generate the modified algorithm code.

    :param str filename: The file containing the algorithm specification.
    :param str api: The name of the API to use. Defaults to the API \
                    specified in the PSyclone configuration file.
    :param str kernel_path: The directory from which to recursively \
                            search for the files containing the kernel \
                            source (if different from the location of the \
                            algorithm specification).
    :param str script_name: A script file that can apply optimisations \
                            to the PSy layer (can be a path to a file or \
                            a filename that relies on the PYTHONPATH to \
                            find the module).
    :param bool line_length: A logical flag specifying whether we care \
                             about line lengths being longer than 132 \
                             characters. If so, the input (algorithm \
                             and kernel) code is checked to make sure \
                             that it conforms. The default is False.
    :param bool distributed_memory: A logical flag specifying whether to \
                                    generate distributed memory code. The \
                                    default is set in the config.py file.
    :param str kern_out_path: Directory to which to write transformed \
                              kernel code.
    :param str kern_naming: the scheme to use when re-naming transformed \
                            kernels.
    :return: 2-tuple containing fparser1 ASTs for the algorithm code and \
             the psy code.
    :rtype: (:py:class:`fparser.one.block_statements.BeginSource`, \
             :py:class:`fparser.one.block_statements.Module`)

    :raises IOError: if the filename or search path do not exist
    :raises GenerationError: if an invalid API is specified.
    :raises GenerationError: if an invalid kernel-renaming scheme is specified.

    For example:

    >>> from psyclone.generator import generate
    >>> alg, psy = generate("algspec.f90")
    >>> alg, psy = generate("algspec.f90", kernel_path="src/kernels")
    >>> alg, psy = generate("algspec.f90", script_name="optimise.py")
    >>> alg, psy = generate("algspec.f90", line_length=True)
    >>> alg, psy = generate("algspec.f90", distributed_memory=False)

    '''

    if distributed_memory is None:
        distributed_memory = Config.get().distributed_memory

    # pylint: disable=too-many-statements, too-many-locals, too-many-branches
    if api == "":
        api = Config.get().default_api
    else:
        if api not in Config.get().supported_apis:
            raise GenerationError(
                "generate: Unsupported API '{0}' specified. Supported "
                "types are {1}.".format(api,
                                        Config.get().supported_apis))

    # Store Kernel-output options in our Configuration object
    Config.get().kernel_output_dir = kern_out_path
    try:
        Config.get().kernel_naming = kern_naming
    except ValueError as verr:
        raise GenerationError(
            "Invalid kernel-renaming scheme supplied: {0}".format(str(verr)))

    if not os.path.isfile(filename):
        raise IOError("file '{0}' not found".format(filename))
    if kernel_path and not os.access(kernel_path, os.R_OK):
        raise IOError("kernel search path '{0}' not found".format(kernel_path))
    try:
        from psyclone.alg_gen import Alg
        ast, invoke_info = parse(filename,
                                 api=api,
                                 invoke_name="invoke",
                                 kernel_path=kernel_path,
                                 line_length=line_length)
        psy = PSyFactory(api, distributed_memory=distributed_memory)\
            .create(invoke_info)
        if script_name is not None:
            handle_script(script_name, psy)

        if api not in API_WITHOUT_ALGORITHM:
            alg_gen = Alg(ast, psy).gen
        else:
            alg_gen = None
    except Exception:
        raise

    return alg_gen, psy.gen
Example #10
0
def adduse(parse_tree, location, name, only=None, funcnames=None):
    '''Add a Fortran 'use' statement to an existing fparser2 parse
    tree. This will be added at the first valid location before the
    current location.

    This function should be part of the fparser2 replacement for
    f2pygen (which uses fparser1) but is kept here until this is
    developed, see issue #240.

    The 'parse_tree' argument is only required as fparser2 currently
    does not connect a child to a parent. This will be addressed in
    issue fparser:#102.

    :param parse_tree: The full parse tree of the associated code
    :type parse_tree: :py:class:`fparser.two.utils.Base`
    :param location: The current location (node) in the parse tree \
    provided in the parse_tree argument
    :type location: :py:class:`fparser.two.utils.Base`
    :param str name: The name of the use statement
    :param bool only: Whether to include the 'only' clause in the use \
    statement or not. Defaults to None which will result in only being \
    added if funcnames has content and not being added otherwise.
    :param funcnames: A list of names to include in the use statement's \
    only list. If the list is empty or None then nothing is \
    added. Defaults to None.
    :type funcnames: list of str

    :raises GenerationError: if the location is not part of the parse \
    tree.
    :raises GenerationError: if the location is not a valid location \
    to add a use statement.
    :raises NotImplementedError: if the type of parent node is not \
    supported.
    :raises InternalError: if the type of parent node does not have \
    the expected structure.

    '''
    # pylint: disable=too-many-locals
    # pylint: disable=too-many-branches
    from fparser.two.utils import walk_ast
    from fparser.two.Fortran2003 import Main_Program, Module, \
        Subroutine_Subprogram, Function_Subprogram, Use_Stmt, \
        Specification_Part
    from psyclone.psyGen import GenerationError, InternalError

    if location is None:
        raise GenerationError("alg_gen.py:adduse: Location argument must "
                              "not be None.")
    if funcnames:
        # funcnames have been provided for the only clause.
        if only is False:
            # However, the only clause has been explicitly set to False.
            raise GenerationError(
                "alg_gen.py:adduse: If the 'funcnames' argument is provided "
                "and has content, then the 'only' argument must not be set "
                "to 'False'.")
        if only is None:
            # only has not been specified so set it to True as it is
            # required when funcnames has content.
            only = True

    if only is None:
        # only has not been specified and we can therefore infer that
        # funcnames is empty or is not provided (as earlier code would
        # have set only to True otherwise) so only is not required.
        only = False

    # Create the specified use statement
    only_str = ""
    if only:
        only_str = ", only :"
    my_funcnames = funcnames
    if funcnames is None:
        my_funcnames = []
    use = Use_Stmt("use {0}{1} {2}".format(name, only_str,
                                           ", ".join(my_funcnames)))

    # find the parent program statement containing the specified location
    parent_prog_statement = None
    found = False
    for child in walk_ast(parse_tree.content):
        if child == location:
            found = True
            break
        if isinstance(child, (Main_Program, Module, Subroutine_Subprogram,
                              Function_Subprogram)):
            parent_prog_statement = child

    if not found:
        raise GenerationError("alg_gen.py:adduse: The specified location is "
                              "not in the parse tree.")
    if not parent_prog_statement:
        raise GenerationError(
            "alg_gen.py:adduse: The specified location is invalid as it has "
            "no parent in the parse tree that is a program, module, "
            "subroutine or function.")
    if not isinstance(parent_prog_statement,
                      (Main_Program, Subroutine_Subprogram,
                       Function_Subprogram)):
        # We currently only support program, subroutine and function
        # as ancestors
        raise NotImplementedError(
            "alg_gen.py:adduse: Unsupported parent code found '{0}'. "
            "Currently support is limited to program, subroutine and "
            "function.".format(str(type(parent_prog_statement))))
    if not isinstance(parent_prog_statement.content[1], Specification_Part):
        raise InternalError(
            "alg_gen.py:adduse: The second child of the parent code "
            "(content[1]) is expected to be a specification part but "
            "found '{0}'.".format(repr(parent_prog_statement.content[1])))

    # add the use statement as the first child of the specification
    # part of the program
    spec_part = parent_prog_statement.content[1]
    spec_part.content.insert(0, use)

    return parse_tree
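
A hedged usage sketch for adduse(); the Fortran snippet and the index used to pick a 'location' node are illustrative assumptions, while the parser set-up follows the standard fparser2 ParserFactory/FortranStringReader API:

# Hedged usage sketch: add "use my_mod, only : invoke_0" to a small program.
from fparser.two.parser import ParserFactory
from fparser.common.readfortran import FortranStringReader

code = ("program demo\n"
        "  implicit none\n"
        "  call invoke_0()\n"
        "end program demo\n")
parser = ParserFactory().create(std="f2003")
tree = parser(FortranStringReader(code))
# Use the program's Execution_Part as the 'location' (index assumed).
location = tree.content[0].content[2]
adduse(tree, location, "my_mod", only=True, funcnames=["invoke_0"])
print(tree)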
Example #11
0
 def create():
     ''' Placeholder to create a GOcean-specific built-in call.
     This will require us to create a doubly-nested loop and then create
     the body of the particular built-in operation. '''
     raise GenerationError(
         "Built-ins are not supported for the GOcean 1.0 API")
Example #12
0
    def __init__(self, parent=None, topology_name="", loop_type=""):
        Loop.__init__(self, parent=parent, valid_loop_types=VALID_LOOP_TYPES)
        self.loop_type = loop_type

        # We set the loop variable name in the constructor so that it is
        # available when we're determining which vars should be OpenMP
        # PRIVATE (which is done *before* code generation is performed)
        if self.loop_type == "inner":
            self._variable_name = "i"
        elif self.loop_type == "outer":
            self._variable_name = "j"
        else:
            raise GenerationError(
                "Invalid loop type of '{0}'. Expected one of {1}".format(
                    self._loop_type, VALID_LOOP_TYPES))

        # Create a dictionary to simplify the business of looking up
        # loop bounds
        self._bounds_lookup = {}
        for grid_offset in SUPPORTED_OFFSETS:
            self._bounds_lookup[grid_offset] = {}
            for gridpt_type in VALID_FIELD_GRID_TYPES:
                self._bounds_lookup[grid_offset][gridpt_type] = {}
                for itspace in VALID_ITERATES_OVER:
                    self._bounds_lookup[grid_offset][gridpt_type][itspace] = {}

        # Loop bounds for a mesh with NE offset
        self._bounds_lookup['offset_ne']['ct']['all_pts'] = \
            {'inner': {'start': "1", 'stop': "+1"},
             'outer': {'start': "1", 'stop': "+1"}}
        self._bounds_lookup['offset_ne']['ct']['internal_pts'] = \
            {'inner': {'start': "2", 'stop': ""},
             'outer': {'start': "2", 'stop': ""}}
        self._bounds_lookup['offset_ne']['cu']['all_pts'] = \
            {'inner': {'start': "1", 'stop': ""},
             'outer': {'start': "1", 'stop': "+1"}}
        self._bounds_lookup['offset_ne']['cu']['internal_pts'] = \
            {'inner': {'start': "2", 'stop': "-1"},
             'outer': {'start': "2", 'stop': ""}}
        self._bounds_lookup['offset_ne']['cv']['all_pts'] = \
            {'inner': {'start': "1", 'stop': "+1"},
             'outer': {'start': "1", 'stop': ""}}
        self._bounds_lookup['offset_ne']['cv']['internal_pts'] = \
            {'inner': {'start': "2", 'stop': ""},
             'outer': {'start': "2", 'stop': "-1"}}
        self._bounds_lookup['offset_ne']['cf']['all_pts'] = \
            {'inner': {'start': "1", 'stop': ""},
             'outer': {'start': "1", 'stop': ""}}
        self._bounds_lookup['offset_ne']['cf']['internal_pts'] = \
            {'inner': {'start': "1", 'stop': "-1"},
             'outer': {'start': "1", 'stop': "-1"}}
        # Loop bounds for a mesh with SW offset
        self._bounds_lookup['offset_sw']['ct']['all_pts'] = \
            {'inner': {'start': "1", 'stop': "+1"},
             'outer': {'start': "1", 'stop': "+1"}}
        self._bounds_lookup['offset_sw']['ct']['internal_pts'] = \
            {'inner': {'start': "2", 'stop': ""},
             'outer': {'start': "2", 'stop': ""}}
        self._bounds_lookup['offset_sw']['cu']['all_pts'] = \
            {'inner': {'start': "1", 'stop': "+1"},
             'outer': {'start': "1", 'stop': "+1"}}
        self._bounds_lookup['offset_sw']['cu']['internal_pts'] = \
            {'inner': {'start': "2", 'stop': "+1"},
             'outer': {'start': "2", 'stop': ""}}
        self._bounds_lookup['offset_sw']['cv']['all_pts'] = \
            {'inner': {'start': "1", 'stop': "+1"},
             'outer': {'start': "1", 'stop': "+1"}}
        self._bounds_lookup['offset_sw']['cv']['internal_pts'] = \
            {'inner': {'start': "2", 'stop': ""},
             'outer': {'start': "2", 'stop': "+1"}}
        self._bounds_lookup['offset_sw']['cf']['all_pts'] = \
            {'inner': {'start': "1", 'stop': "+1"},
             'outer': {'start': "1", 'stop': "+1"}}
        self._bounds_lookup['offset_sw']['cf']['internal_pts'] = \
            {'inner': {'start': "2", 'stop': "+1"},
             'outer': {'start': "2", 'stop': "+1"}}
        # For offset 'any'
        for gridpt_type in VALID_FIELD_GRID_TYPES:
            for itspace in VALID_ITERATES_OVER:
                self._bounds_lookup['offset_any'][gridpt_type][itspace] = \
                    {'inner': {'start': "1", 'stop': ""},
                     'outer': {'start': "1", 'stop': ""}}
        # For 'every' grid-point type
        for offset in SUPPORTED_OFFSETS:
            for itspace in VALID_ITERATES_OVER:
                self._bounds_lookup[offset]['every'][itspace] = \
                    {'inner': {'start': "1", 'stop': "+1"},
                     'outer': {'start': "1", 'stop': "+1"}}
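
The result is a four-level table keyed by grid offset, grid-point type, iteration space and loop type. A hedged look-up sketch, assuming 'loop' is an instance of this class:

# Hedged sketch: read one entry from the bounds table built above.
entry = loop._bounds_lookup['offset_ne']['cu']['internal_pts']['inner']
print(entry['start'], entry['stop'])   # -> 2 -1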
Example #13
0
    def gen_code(self, parent):
        ''' Generates dynamo version 0.1 specific psy code for a call to
            the dynamo kernel instance. '''
        from psyclone.f2pygen import CallGen, DeclGen, AssignGen, UseGen

        # TODO: we simply choose the first field as the lookup for the moment
        field_name = self.arguments.args[0].name

        # add a dofmap lookup using first field.
        # TODO: This needs to be generalised to work for multiple dofmaps
        parent.add(
            CallGen(parent, field_name + "%vspace%get_cell_dofmap",
                    ["cell", "map"]))
        parent.add(DeclGen(parent, datatype="integer", entity_decls=["cell"]))
        parent.add(
            DeclGen(parent,
                    datatype="integer",
                    pointer=True,
                    entity_decls=["map(:)"]))

        # create the argument list on the fly so we can also create
        # appropriate variables and lookups
        arglist = ["nlayers", "ndf", "map"]

        found_gauss_quad = False
        gauss_quad_arg = None
        for arg in self._arguments.args:
            if arg.requires_basis:
                basis_name = arg.function_space + "_basis_" + arg.name
                arglist.append(basis_name)
                new_parent, position = parent.start_parent_loop()
                new_parent.add(CallGen(new_parent,
                                       field_name + "%vspace%get_basis",
                                       [basis_name]),
                               position=["before", position])
                parent.add(
                    DeclGen(parent,
                            datatype="real",
                            kind="dp",
                            pointer=True,
                            entity_decls=[basis_name + "(:,:,:,:,:)"]))
            if arg.requires_diff_basis:
                raise GenerationError("differential basis has not yet "
                                      "been coded")
            if arg.requires_gauss_quad:
                if found_gauss_quad:
                    raise GenerationError("found more than one gaussian "
                                          "quadrature in this kernel")
                found_gauss_quad = True
                gauss_quad_arg = arg
            dataref = "%data"
            arglist.append(arg.name + dataref)

        if found_gauss_quad:
            gq_name = "gaussian_quadrature"
            arglist.append(gauss_quad_arg.name + "%" + gq_name)

        # generate the kernel call and associated use statement
        parent.add(CallGen(parent, self._name, arglist))
        if not self.module_inline:
            parent.add(
                UseGen(parent,
                       name=self._module_name,
                       only=True,
                       funcnames=[self._name]))

        # declare and initialise the number of layers and the number
        # of degrees of freedom. Needs to be generalised.
        parent.add(
            DeclGen(parent,
                    datatype="integer",
                    entity_decls=["nlayers", "ndf"]))
        new_parent, position = parent.start_parent_loop()
        new_parent.add(AssignGen(new_parent,
                                 lhs="nlayers",
                                 rhs=field_name + "%get_nlayers()"),
                       position=["before", position])
        new_parent.add(AssignGen(new_parent,
                                 lhs="ndf",
                                 rhs=field_name + "%vspace%get_ndf()"),
                       position=["before", position])
Example #14
0
def generate(filename,
             api="",
             kernel_path="",
             script_name=None,
             line_length=False,
             distributed_memory=DISTRIBUTED_MEMORY):
    # pylint: disable=too-many-arguments
    '''Takes a GungHo algorithm specification as input and outputs the
    associated generated algorithm and psy codes suitable for
    compiling with the specified kernel(s) and GungHo
    infrastructure. Uses the :func:`parse.parse` function to parse the
    algorithm specification, the :class:`psyGen.PSy` class to generate
    the PSy code and the :class:`algGen.Alg` class to generate the
    modified algorithm code.

    :param str filename: The file containing the algorithm specification.
    :param str api: The name of the API to use. Defaults to DEFAULTAPI.
    :param str kernel_path: The directory from which to recursively
                            search for the files containing the kernel
                            source (if different from the location of the
                            algorithm specification)
    :param str script_name: A script file that can apply optimisations
                            to the PSy layer (can be a path to a file or
                            a filename that relies on the PYTHONPATH to
                            find the module).
    :param bool line_length: A logical flag specifying whether we care
                             about line lengths being longer than 132
                             characters. If so, the input (algorithm
                             and kernel) code is checked to make sure
                             that it conforms. The default is False.
    :param bool distributed_memory: A logical flag specifying whether to
                                    generate distributed memory code. The
                                    default is set in the config.py file.
    :return: 2-tuple containing the generated algorithm code and the
             generated psy code.
    :rtype: (ast, ast)
    :raises IOError: if the filename or search path do not exist

    For example:

    >>> from generator import generate
    >>> alg, psy = generate("algspec.f90")
    >>> alg, psy = generate("algspec.f90", kernel_path="src/kernels")
    >>> alg, psy = generate("algspec.f90", script_name="optimise.py")
    >>> alg, psy = generate("algspec.f90", line_length=True)
    >>> alg, psy = generate("algspec.f90", distributed_memory=False)

    '''

    if api == "":
        api = DEFAULTAPI
    else:
        if api not in SUPPORTEDAPIS:
            raise GenerationError(
                "generate: Unsupported API '{0}' specified. Supported "
                "types are {1}.".format(api, SUPPORTEDAPIS))

    if not os.path.isfile(filename):
        raise IOError("file '{0}' not found".format(filename))
    if kernel_path and not os.access(kernel_path, os.R_OK):
        raise IOError("kernel search path '{0}' not found".format(kernel_path))
    try:
        from psyclone.algGen import Alg
        ast, invoke_info = parse(filename,
                                 api=api,
                                 invoke_name="invoke",
                                 kernel_path=kernel_path,
                                 line_length=line_length)
        psy = PSyFactory(api, distributed_memory=distributed_memory).\
            create(invoke_info)
        if script_name is not None:
            handle_script(script_name, psy)
        alg = Alg(ast, psy)
    except Exception:
        raise
    return alg.gen, psy.gen