Example #1
def test_dag_names():
    ''' Test that the dag_name method returns the correct value for the
    node class and its specialisations. '''
    _, invoke_info = parse(os.path.join(BASE_PATH, "1_single_invoke.f90"),
                           api="dynamo0.3")
    psy = PSyFactory("dynamo0.3", distributed_memory=True).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule
    assert super(Schedule, schedule).dag_name == "node_0"
    assert schedule.dag_name == "schedule_0"
    assert schedule.children[0].dag_name == "checkHaloExchange(f1)_0"
    assert schedule.children[4].dag_name == "loop_5"
    schedule.children[4].loop_type = "colour"
    assert schedule.children[4].dag_name == "loop_[colour]_5"
    schedule.children[4].loop_type = ""
    assert (schedule.children[4].loop_body[0].dag_name ==
            "kernel_testkern_code_10")
    _, invoke_info = parse(os.path.join(
        BASE_PATH, "15.14.3_sum_setval_field_builtin.f90"),
                           api="dynamo0.3")
    psy = PSyFactory("dynamo0.3", distributed_memory=True).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule
    global_sum = schedule.children[2]
    assert global_sum.dag_name == "globalsum(asum)_2"
    builtin = schedule.children[1].loop_body[0]
    assert builtin.dag_name == "builtin_sum_x_12"
Example #2
def test_kern_builtin_no_loop():
    ''' Test that applying Extract Transformation on a Kernel or Built-in
    call without its parent Loop raises a TransformationError. '''

    # Test Dynamo0.3 API for Built-in call error
    dynetrans = DynamoExtractRegionTrans()
    _, invoke_info = parse(os.path.join(
        DYNAMO_BASE_PATH, "15.1.2_builtin_and_normal_kernel_invoke.f90"),
                           api=DYNAMO_API)
    psy = PSyFactory(DYNAMO_API, distributed_memory=False).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule
    # Test Built-in call
    builtin_call = schedule.children[1].loop_body[0]
    with pytest.raises(TransformationError) as excinfo:
        _, _ = dynetrans.apply(builtin_call)
    assert ("Extraction of a Kernel or a Built-in call without its "
            "parent Loop is not allowed.") in str(excinfo)

    # Test GOcean1.0 API for Kernel call error
    gocetrans = GOceanExtractRegionTrans()
    _, invoke_info = parse(os.path.join(GOCEAN_BASE_PATH,
                                        "single_invoke_three_kernels.f90"),
                           api=GOCEAN_API)
    psy = PSyFactory(GOCEAN_API, distributed_memory=False).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule
    # Test Kernel call
    kernel_call = schedule.children[0].loop_body[0].loop_body[0]
    with pytest.raises(TransformationError) as excinfo:
        _, _ = gocetrans.apply(kernel_call)
    assert ("Extraction of a Kernel or a Built-in call without its "
            "parent Loop is not allowed.") in str(excinfo)
Example #3
def test_node_forward_dependence():
    '''Test that the Node class forward_dependence method returns the
    closest dependent Node after the current Node in the schedule or
    None if none are found.'''
    _, invoke_info = parse(os.path.join(BASE_PATH,
                                        "15.14.1_multi_aX_plus_Y_builtin.f90"),
                           api="dynamo0.3")
    psy = PSyFactory("dynamo0.3", distributed_memory=True).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule
    read4 = schedule.children[4]
    # 1: returns none if none found
    # a) check many reads
    assert not read4.forward_dependence()
    # b) check no dependencies for a call
    assert not read4.children[0].forward_dependence()
    # 2: returns first dependent kernel arg when there are many
    # dependencies
    # a) check first read returned
    writer = schedule.children[3]
    next_read = schedule.children[4]
    assert writer.forward_dependence() == next_read
    # b) check writer returned
    first_loop = schedule.children[0]
    assert first_loop.forward_dependence() == writer
    # 3: haloexchange dependencies
    _, invoke_info = parse(os.path.join(BASE_PATH,
                                        "4.5_multikernel_invokes.f90"),
                           api="dynamo0.3")
    psy = PSyFactory("dynamo0.3", distributed_memory=True).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule
    prev_loop = schedule.children[7]
    halo_field = schedule.children[8]
    next_loop = schedule.children[9]
    # a) previous loop depends on halo exchange
    assert prev_loop.forward_dependence() == halo_field
    # b) halo exchange depends on following loop
    assert halo_field.forward_dependence() == next_loop

    # 4: globalsum dependencies
    _, invoke_info = parse(os.path.join(
        BASE_PATH, "15.14.3_sum_setval_field_builtin.f90"),
                           api="dynamo0.3")
    psy = PSyFactory("dynamo0.3", distributed_memory=True).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule
    prev_loop = schedule.children[0]
    sum_loop = schedule.children[1]
    global_sum_loop = schedule.children[2]
    next_loop = schedule.children[3]
    # a) prev loop depends on sum loop
    assert prev_loop.forward_dependence() == sum_loop
    # b) sum loop depends on global sum loop
    assert sum_loop.forward_dependence() == global_sum_loop
    # c) global sum loop depends on next loop
    assert global_sum_loop.forward_dependence() == next_loop
Example #4
def test_node_backward_dependence():
    '''Test that the Node class backward_dependence method returns the
    closest dependent Node before the current Node in the schedule or
    None if none are found.'''
    _, invoke_info = parse(os.path.join(BASE_PATH,
                                        "15.14.1_multi_aX_plus_Y_builtin.f90"),
                           api="dynamo0.3")
    psy = PSyFactory("dynamo0.3", distributed_memory=True).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule
    # 1: loop no backwards dependence
    loop3 = schedule.children[2]
    assert not loop3.backward_dependence()
    # 2: loop to loop backward dependence
    # a) many steps
    last_loop_node = schedule.children[6]
    prev_dep_loop_node = schedule.children[3]
    assert last_loop_node.backward_dependence() == prev_dep_loop_node
    # b) previous
    assert prev_dep_loop_node.backward_dependence() == loop3
    # 3: haloexchange dependencies
    _, invoke_info = parse(os.path.join(BASE_PATH,
                                        "4.5_multikernel_invokes.f90"),
                           api="dynamo0.3")
    psy = PSyFactory("dynamo0.3", distributed_memory=True).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule
    loop2 = schedule.children[7]
    halo_exchange = schedule.children[8]
    loop3 = schedule.children[9]
    # a) following loop node depends on halo exchange node
    result = loop3.backward_dependence()
    assert result == halo_exchange
    # b) halo exchange node depends on previous loop node
    result = halo_exchange.backward_dependence()
    assert result == loop2
    # 4: globalsum dependencies
    _, invoke_info = parse(os.path.join(
        BASE_PATH, "15.14.3_sum_setval_field_builtin.f90"),
                           api="dynamo0.3")
    psy = PSyFactory("dynamo0.3", distributed_memory=True).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule
    loop1 = schedule.children[0]
    loop2 = schedule.children[1]
    global_sum = schedule.children[2]
    loop3 = schedule.children[3]
    # a) loop3 depends on global sum
    assert loop3.backward_dependence() == global_sum
    # b) global sum depends on loop2
    assert global_sum.backward_dependence() == loop2
    # c) loop2 (sum) depends on loop1
    assert loop2.backward_dependence() == loop1
Example #5
def test_extract_single_builtin_dynamo0p3():
    ''' Test that extraction of a BuiltIn in an Invoke produces the
    correct result in Dynamo0.3 API without and with optimisations. '''
    from psyclone.transformations import DynamoOMPParallelLoopTrans

    etrans = DynamoExtractRegionTrans()
    otrans = DynamoOMPParallelLoopTrans()

    # Test extract without optimisations
    _, invoke_info = parse(os.path.join(
        DYNAMO_BASE_PATH, "15.1.2_builtin_and_normal_kernel_invoke.f90"),
                           api=DYNAMO_API)
    psy = PSyFactory(DYNAMO_API, distributed_memory=False).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule

    schedule, _ = etrans.apply(schedule.children[1])
    code = str(psy.gen)
    output = ("      ! ExtractStart\n"
              "      ! CALL write_extract_arguments(argument_list)\n"
              "      !\n"
              "      DO df=1,undf_any_space_1_f2\n"
              "        f2_proxy%data(df) = 0.0\n"
              "      END DO \n"
              "      !\n"
              "      ! ExtractEnd\n")
    assert output in code

    # Test extract with OMP Parallel optimisation
    _, invoke_info = parse(os.path.join(
        DYNAMO_BASE_PATH, "15.1.1_builtin_and_normal_kernel_invoke_2.f90"),
                           api=DYNAMO_API)
    psy = PSyFactory(DYNAMO_API, distributed_memory=False).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule

    schedule, _ = otrans.apply(schedule.children[1])
    schedule, _ = etrans.apply(schedule.children[1])
    code = str(psy.gen)
    output = ("      ! ExtractStart\n"
              "      ! CALL write_extract_arguments(argument_list)\n"
              "      !\n"
              "      !$omp parallel do default(shared), private(df), "
              "schedule(static)\n"
              "      DO df=1,undf_any_space_1_f1\n"
              "        f1_proxy%data(df) = 0.5*f1_proxy%data(df) + "
              "f2_proxy%data(df)\n"
              "      END DO \n"
              "      !$omp end parallel do\n"
              "      !\n"
              "      ! ExtractEnd\n")
    assert output in code
Example #6
def test_extract_node_position():
    ''' Test that the Extract Transformation inserts the ExtractNode
    at the position of the first Node in a Schedule from the list of
    Nodes marked for extraction. '''

    # Test GOcean1.0 API for extraction of a single Node
    gocetrans = GOceanExtractRegionTrans()
    _, invoke_info = parse(os.path.join(GOCEAN_BASE_PATH,
                                        "single_invoke_three_kernels.f90"),
                           api=GOCEAN_API)
    psy = PSyFactory(GOCEAN_API, distributed_memory=False).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule
    # Apply Extract transformation to the second Node and assert that
    # position and the absolute position of the ExtractNode are the same as
    # respective positions of the second Node before the transformation.
    pos = 1
    child = schedule.children[pos]
    abspos = child.abs_position
    dpth = child.depth
    schedule, _ = gocetrans.apply(child)
    extract_node = schedule.walk(ExtractNode)
    # The result is only one ExtractNode in the list with position 1
    assert extract_node[0].position == pos
    assert extract_node[0].abs_position == abspos
    assert extract_node[0].depth == dpth

    # Test Dynamo0.3 API for extraction of a list of Nodes
    dynetrans = DynamoExtractRegionTrans()
    _, invoke_info = parse(os.path.join(
        DYNAMO_BASE_PATH, "15.1.2_builtin_and_normal_kernel_invoke.f90"),
                           api=DYNAMO_API)
    psy = PSyFactory(DYNAMO_API, distributed_memory=False).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule
    # Apply Extract transformation to the first three Nodes and assert that
    # position and the absolute position of the ExtractNode are the same as
    # respective positions of the first Node before the transformation.
    pos = 0
    children = schedule.children[pos:pos + 3]
    abspos = children[0].abs_position
    dpth = children[0].depth
    schedule, _ = dynetrans.apply(children)
    extract_node = schedule.walk(ExtractNode)
    # The result is only one ExtractNode in the list with position 0
    assert extract_node[0].position == pos
    assert extract_node[0].abs_position == abspos
    assert extract_node[0].depth == dpth
Example #7
def inline():
    ''' function exercising the module-inline transformation '''
    from psyclone.parse.algorithm import parse
    from psyclone.psyGen import PSyFactory
    import os
    from psyclone.transformations import KernelModuleInlineTrans

    _, info = parse(os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                 "..", "..", "..", "src", "psyclone", "tests",
                                 "test_files", "dynamo0p1", "algorithm",
                                 "1_single_function.f90"),
                    api="dynamo0.1")
    psy = PSyFactory("dynamo0.1").create(info)
    invokes = psy.invokes
    print(psy.invokes.names)
    invoke = invokes.get("invoke_0_testkern_type")
    schedule = invoke.schedule
    schedule.view()
    kern = schedule.children[0].loop_body[0]
    # setting module inline directly
    kern.module_inline = True
    schedule.view()
    # unsetting module inline via a transformation
    trans = KernelModuleInlineTrans()
    schedule, _ = trans.apply(kern, {"inline": False})
    schedule.view()
    # setting module inline via a transformation
    schedule, _ = trans.apply(kern)
    schedule.view()
    print(str(psy.gen))
Example #8
def test_globalstoargumentstrans_clash_symboltable(monkeypatch):
    ''' Check that the GlobalsToArguments transformation produces the
    expected error when there is a symbol name clash.'''

    trans = KernelGlobalsToArguments()
    # Construct a testing InvokeSchedule
    _, invoke_info = parse(os.path.join(BASEPATH, "gocean1p0",
                                        "single_invoke_kern_with_use.f90"),
                           api=API)
    psy = PSyFactory(API).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    kernel = invoke.schedule.coded_kernels()[0]

    # Monkeypatch Symbol.resolve_deferred to avoid module searching and
    # importing in this test. In this case we assume the symbol is a
    # DataSymbol of REAL type.
    def create_real(variable):
        return DataSymbol(variable.name,
                          REAL_TYPE,
                          interface=variable.interface)

    monkeypatch.setattr(Symbol, "resolve_deferred", create_real)

    # Add 'rdt' into the symbol table
    kernel.root.symbol_table.add(DataSymbol("rdt", REAL_TYPE))

    # Test transforming a single kernel
    with pytest.raises(KeyError) as err:
        trans.apply(kernel)
    assert ("Couldn't copy 'rdt: <Scalar<REAL, UNDEFINED>, "
            "Global(container='model_mod')>' into the SymbolTable. The name "
            "'rdt' is already used by another symbol." in str(err.value))
Example #9
def test_unexpected_type_error(dist_mem):
    ''' Check that we raise an exception if an unexpected datatype is found
    when running the ArgOrdering generate method. As it is abstract we use
    the KernCallArgList sub class.

    '''
    full_path = os.path.join(get_base_path(TEST_API),
                             "1.0.1_single_named_invoke.f90")
    _, invoke_info = parse(full_path, api=TEST_API)
    psy = PSyFactory(TEST_API, distributed_memory=dist_mem).create(invoke_info)
    schedule = psy.invokes.invoke_list[0].schedule
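    # With distributed memory enabled, halo-exchange nodes are inserted
    # ahead of the loop, so the kernel loop sits further down the schedule.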
    if dist_mem:
        index = 4
    else:
        index = 0
    loop = schedule.children[index]
    kernel = loop.loop_body[0]
    # Sabotage one of the arguments to make it have an invalid type.
    kernel.arguments.args[0]._argument_type = "invalid"
    # Now call KernCallArgList to raise an exception
    create_arg_list = KernCallArgList(kernel)
    with pytest.raises(GenerationError) as excinfo:
        create_arg_list.generate()
    const = LFRicConstants()
    assert ("ArgOrdering.generate(): Unexpected argument "
            "type found. Expected one of '{0}' but found 'invalid'".format(
                const.VALID_ARG_TYPE_NAMES) in str(excinfo.value))
Example #10
def test_omp_do_within_if():
    ''' Check that we can insert an OpenMP parallel do within an if block. '''
    from psyclone.transformations import OMPParallelLoopTrans
    otrans = OMPParallelLoopTrans()
    _, invoke_info = parse(os.path.join(BASE_PATH, "imperfect_nest.f90"),
                           api=API,
                           line_length=False)
    psy = PSyFactory(API, distributed_memory=False).create(invoke_info)
    schedule = psy.invokes.get('imperfect_nest').schedule
    loop = schedule[0].loop_body[1].else_body[0].else_body[0]
    assert isinstance(loop, nemo.NemoLoop)
    # Apply the transformation to a loop within an else clause
    schedule, _ = otrans.apply(loop)
    gen = str(psy.gen)
    expected = ("    ELSE\n"
                "      !$omp parallel do default(shared), private(ji,jj), "
                "schedule(static)\n"
                "      DO jj = 1, jpj, 1\n"
                "        DO ji = 1, jpi, 1\n"
                "          zdkt(ji, jj) = (ptb(ji, jj, jk - 1, jn) - "
                "ptb(ji, jj, jk, jn)) * wmask(ji, jj, jk)\n"
                "        END DO\n"
                "      END DO\n"
                "      !$omp end parallel do\n"
                "    END IF\n")
    assert expected in gen
Example #11
def test_operator_nofield_scalar_deref(tmpdir, dist_mem):
    ''' Tests that an operator with no field and a
    scalar argument is implemented correctly in the PSy layer when both
    are obtained by dereferencing derived type objects. '''
    _, invoke_info = parse(os.path.join(
        BASE_PATH, "10.6.1_operator_no_field_scalar_deref.f90"),
                           api=TEST_API)
    psy = PSyFactory(TEST_API, distributed_memory=dist_mem).create(invoke_info)
    gen = str(psy.gen)

    assert LFRicBuild(tmpdir).code_compiles(psy)

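    # With distributed memory the loop runs out to the last level-1 halo
    # cell; without it, the loop simply covers every cell returned by
    # get_ncell().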
    if dist_mem:
        assert "mesh => opbox_my_mapping_proxy%fs_from%get_mesh()" in gen
    assert "nlayers = opbox_my_mapping_proxy%fs_from%get_nlayers()" in gen
    assert "ndf_w2 = opbox_my_mapping_proxy%fs_from%get_ndf()" in gen
    assert ("qr_init_quadrature_symmetrical%compute_function(BASIS, "
            "opbox_my_mapping_proxy%fs_from, dim_w2, ndf_w2, "
            "basis_w2_qr_init_quadrature_symmetrical)" in gen)
    if dist_mem:
        assert "DO cell=1,mesh%get_last_halo_cell(1)" in gen
    else:
        assert ("DO cell=1,opbox_my_mapping_proxy%fs_from%get_ncell()" in gen)
    assert ("(cell, nlayers, opbox_my_mapping_proxy%ncell_3d, "
            "opbox_my_mapping_proxy%local_stencil, box_b, ndf_w2, "
            "basis_w2_qr_init_quadrature_symmetrical, "
            "np_xy_qr_init_quadrature_symmetrical, "
            "np_z_qr_init_quadrature_symmetrical, "
            "weights_xy_qr_init_quadrature_symmetrical, "
            "weights_z_qr_init_quadrature_symmetrical)" in gen)
Example #12
def test_default_api():
    ''' Check that parse() picks up the default API if none is specified
    by the caller. We do this simply by checking that it returns OK
    having parsed some dynamo0.3 code. '''
    _, invoke_info = parse(
        os.path.join(TEST_PATH, "1_single_invoke.f90"))
    assert len(invoke_info.calls) == 1
Example #13
def test_node_dag_returns_digraph(monkeypatch):
    ''' Test that the dag generation returns the expected Digraph object. We
    make this test independent of whether or not graphviz is installed by
    monkeypatching the psyir.nodes.node._graphviz_digraph_class function to
    return a fake digraph class type. '''
    class FakeDigraph(object):
        ''' Fake version of graphviz.Digraph class with key methods
        implemented as noops. '''

        # pylint: disable=redefined-builtin
        def __init__(self, format=None):
            ''' Fake constructor. '''

        def node(self, _name):
            ''' Fake node method. '''

        def edge(self, _name1, _name2, color="red"):
            ''' Fake edge method. '''

        def render(self, filename):
            ''' Fake render method. '''

    monkeypatch.setattr(node, "_graphviz_digraph_class", lambda: FakeDigraph)
    _, invoke_info = parse(os.path.join(BASE_PATH, "1_single_invoke.f90"),
                           api="dynamo0.3")
    psy = PSyFactory("dynamo0.3", distributed_memory=False).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule
    dag = schedule.dag()
    assert isinstance(dag, FakeDigraph)
Example #14
def test_node_position():
    '''
    Test that the Node class position and abs_position methods return
    the correct value for a Node in a tree. The start position is
    set to 0. Relative position starts from 0 and absolute from 1.
    '''
    _, invoke_info = parse(os.path.join(BASE_PATH,
                                        "4.7_multikernel_invokes.f90"),
                           api="dynamo0.3")
    psy = PSyFactory("dynamo0.3", distributed_memory=True).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    schedule = invoke.schedule
    child = schedule.children[6]
    # Assert that position of a Schedule (no parent Node) is 0
    assert schedule.position == 0
    # Assert that start_position of any Node is 0
    assert child.START_POSITION == 0
    # Assert that relative and absolute positions return correct values
    assert child.position == 6
    assert child.abs_position == 7
    # Test InternalError for _find_position with an incorrect position
    with pytest.raises(InternalError) as excinfo:
        _, _ = child._find_position(child.root.children, -2)
    assert "started from -2 instead of 0" in str(excinfo.value)
    # Test InternalError for abs_position with a Node that does
    # not belong to the Schedule
    ompdir = OMPDoDirective()
    with pytest.raises(InternalError) as excinfo:
        _ = ompdir.abs_position
    assert ("PSyclone internal error: Error in search for Node position "
            "in the tree") in str(excinfo.value)
Example #15
def test_field_invoke_uniq_declns_valid_intrinsic():
    ''' Tests that all valid intrinsic types for user-defined field arguments
    ('real' and 'integer') are accepted by Invoke.unique_declarations().

    '''
    _, invoke_info = parse(
        os.path.join(BASE_PATH,
                     "4.14_multikernel_invokes_real_int_field_fs.f90"),
        api=TEST_API)
    psy = PSyFactory(TEST_API, distributed_memory=False).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]

    # Return 'real'-valued fields
    fields_real_args = invoke.unique_declarations(
        LFRicArgDescriptor.VALID_FIELD_NAMES, intrinsic_type="real")
    fields_real = [arg.declaration_name for arg in fields_real_args]
    assert fields_real == ["f1", "f2", "m1", "m2", "f3", "f4", "m3",
                           "m4", "f5", "f6", "m5", "m6", "m7"]

    # Return 'integer'-valued fields
    fields_int_args = invoke.unique_declarations(
        LFRicArgDescriptor.VALID_FIELD_NAMES, intrinsic_type="integer")
    fields_int = [arg.declaration_name for arg in fields_int_args]
    assert fields_int == ["i1", "i2", "n1", "n2", "i3", "i4", "n3", "n4",
                          "i5", "i6", "n5", "n6", "i7", "i8", "n7"]
Example #16
def test_op_orient_different_space(tmpdir):
    ''' Tests that an operator on different spaces requiring orientation
    information is implemented correctly in the PSy layer. '''
    _, invoke_info = parse(os.path.join(BASE_PATH,
                                        "10.4_operator_orient_different_"
                                        "space.f90"),
                           api=TEST_API)
    psy = PSyFactory(TEST_API, distributed_memory=True).create(invoke_info)
    gen_str = str(psy.gen)

    assert LFRicBuild(tmpdir).code_compiles(psy)

    assert (
        "INTEGER(KIND=i_def), pointer :: orientation_w1(:) => null(), "
        "orientation_w2(:) => null()" in gen_str)
    assert "ndf_w2 = my_mapping_proxy%fs_from%get_ndf()" in gen_str
    assert "ndf_w1 = my_mapping_proxy%fs_to%get_ndf()" in gen_str
    assert "dim_w1 = my_mapping_proxy%fs_to%get_dim_space()" in gen_str
    assert ("CALL qr%compute_function(BASIS, my_mapping_proxy%fs_to, "
            "dim_w1, ndf_w1, basis_w1_qr)" in gen_str)
    assert (
        "orientation_w2 => my_mapping_proxy%fs_from%get_cell_orientation("
        "cell)" in gen_str)
    assert (
        "orientation_w1 => my_mapping_proxy%fs_to%get_cell_orientation(cell)"
        in gen_str)
    assert ("(cell, nlayers, my_mapping_proxy%ncell_3d, "
            "my_mapping_proxy%local_stencil, coord_proxy(1)%data, "
            "coord_proxy(2)%data, coord_proxy(3)%data, ndf_w1, basis_w1_qr, "
            "orientation_w1, ndf_w2, orientation_w2, ndf_w0, undf_w0, "
            "map_w0(:,cell), diff_basis_w0_qr, np_xy_qr, np_z_qr, "
            "weights_xy_qr, weights_z_qr)" in gen_str)
Example #17
def test_operator_deref(tmpdir, dist_mem):
    ''' Tests that we generate correct names for an operator in the PSy
    layer when obtained by de-referencing a derived type in the Algorithm
    layer. '''
    _, invoke_info = parse(os.path.join(BASE_PATH, "10.8_operator_deref.f90"),
                           api=TEST_API)
    psy = PSyFactory(TEST_API,
                     distributed_memory=dist_mem).create(invoke_info)
    generated_code = str(psy.gen)

    assert LFRicBuild(tmpdir).code_compiles(psy)

    assert (
        "SUBROUTINE invoke_0_testkern_operator_type(mm_w0_op, coord, a, qr)"
        in generated_code)
    assert "TYPE(operator_type), intent(in) :: mm_w0_op" in generated_code
    assert "TYPE(operator_proxy_type) mm_w0_op_proxy" in generated_code
    assert "mm_w0_op_proxy = mm_w0_op%get_proxy()" in generated_code
    assert (
        "CALL testkern_operator_code(cell, nlayers, "
        "mm_w0_op_proxy%ncell_3d, mm_w0_op_proxy%local_stencil, "
        "coord_proxy(1)%data, coord_proxy(2)%data, coord_proxy(3)%data, a, "
        "ndf_w0, undf_w0, map_w0(:,cell), basis_w0_qr, "
        "diff_basis_w0_qr, np_xy_qr, np_z_qr, weights_xy_qr, "
        "weights_z_qr)" in generated_code)
Example #18
def test_validate_kernel_code_args(monkeypatch):
    '''Test that a coded kernel that conforms to the expected kernel
    metadata is validated successfully. Also check that the
    appropriate exception is raised if the number of arguments in the
    coded kernel does not match the number of arguments expected by
    the kernel metadata.

    '''
    _, invoke_info = parse(os.path.join(BASE_PATH, "12_kernel_specific.f90"),
                           api=TEST_API)
    psy = PSyFactory(TEST_API, distributed_memory=True).create(invoke_info)
    schedule = psy.invokes.invoke_list[0].schedule
    # matrix vector kernel
    kernel = schedule[2].loop_body[0]

    kernel.validate_kernel_code_args()

    # Force DynKern to think that this kernel is an 'apply' kernel and
    # therefore does not need the mesh height argument.
    monkeypatch.setattr(kernel, "_cma_operation", "apply")
    with pytest.raises(GenerationError) as info:
        kernel.validate_kernel_code_args()
    assert (
        "In kernel 'matrix_vector_code' the number of arguments indicated by "
        "the kernel metadata is 8 but the actual number of kernel arguments "
        "found is 9." in str(info.value))
Example #19
def test_operator_nofield(tmpdir):
    ''' Tests that an operator with no field on the same space is
    implemented correctly in the PSy layer. '''
    _, invoke_info = parse(os.path.join(BASE_PATH,
                                        "10.1_operator_nofield.f90"),
                           api=TEST_API)
    psy = PSyFactory(TEST_API, distributed_memory=True).create(invoke_info)
    gen_code_str = str(psy.gen)

    assert LFRicBuild(tmpdir).code_compiles(psy)

    assert (
        "SUBROUTINE invoke_0_testkern_operator_nofield_type(mm_w2, coord, qr)"
        in gen_code_str)
    assert "TYPE(operator_type), intent(in) :: mm_w2" in gen_code_str
    assert "TYPE(operator_proxy_type) mm_w2_proxy" in gen_code_str
    assert "mm_w2_proxy = mm_w2%get_proxy()" in gen_code_str
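    # An operator argument is cell-local, so no undf or dofmap declarations
    # are expected for its W2 function space.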
    assert "undf_w2" not in gen_code_str
    assert "map_w2" not in gen_code_str
    assert ("CALL testkern_operator_nofield_code(cell, nlayers, "
            "mm_w2_proxy%ncell_3d, mm_w2_proxy%local_stencil, "
            "coord_proxy(1)%data, coord_proxy(2)%data, coord_proxy(3)%data, "
            "ndf_w2, basis_w2_qr, ndf_w0, undf_w0, "
            "map_w0(:,cell), diff_basis_w0_qr, np_xy_qr, np_z_qr, "
            "weights_xy_qr, weights_z_qr)" in gen_code_str)
Example #20
def test_dynkern_setup(monkeypatch):
    ''' Check that internal-consistency checks in DynKern._setup() work
    as expected '''
    _, invoke_info = parse(os.path.join(BASE_PATH,
                                        "1.1.0_single_invoke_xyoz_qr.f90"),
                           api=API)
    psy = PSyFactory(API, distributed_memory=True).create(invoke_info)
    # Get hold of a DynKern object
    schedule = psy.invokes.invoke_list[0].schedule
    kern = schedule.children[3].loop_body[0]
    # Monkeypatch a couple of __init__ routines so that we can get past
    # them in the _setup() routine.
    from psyclone.psyGen import CodedKern
    monkeypatch.setattr(CodedKern, "__init__",
                        lambda me, ktype, kcall, parent, check: None)
    from psyclone.parse.algorithm import KernelCall
    monkeypatch.setattr(KernelCall, "__init__",
                        lambda me, mname, ktype, args: None)
    # Break the shape of the quadrature for this kernel
    monkeypatch.setattr(kern, "_eval_shape", value="gh_wrong_shape")
    # Rather than try and mock-up a DynKernMetadata object, it's easier
    # to make one properly by parsing the kernel code.
    ast = fpapi.parse(os.path.join(BASE_PATH, "testkern_qr.F90"),
                      ignore_comments=False)
    name = "testkern_qr_type"
    dkm = DynKernMetadata(ast, name=name)
    # Finally, call the _setup() method
    with pytest.raises(GenerationError) as excinfo:
        kern._setup(dkm, "my module", None, None)
    assert ("Internal error: evaluator shape 'gh_wrong_shape' is not "
            "recognised" in str(excinfo))
Example #21
def test_operator_bc_kernel_multi_args_err(dist_mem):
    ''' Test that we reject the recognised operator boundary conditions
    kernel if it has more than one argument '''
    _, invoke_info = parse(os.path.join(BASE_PATH,
                                        "12.4_enforce_op_bc_kernel.f90"),
                           api=TEST_API)
    psy = PSyFactory(TEST_API, distributed_memory=dist_mem).create(invoke_info)
    schedule = psy.invokes.invoke_list[0].schedule
    loop = schedule.children[0]
    call = loop.loop_body[0]
    arg = call.arguments.args[0]
    # Make the list of arguments invalid by duplicating (a copy of)
    # this argument. We take a copy because otherwise, when we change
    # the type of arg 1 below, we change it for both.
    call.arguments.args.append(copy.copy(arg))
    with pytest.raises(GenerationError) as excinfo:
        _ = psy.gen
    assert ("Kernel enforce_operator_bc_code has 2 arguments when it "
            "should only have 1 (an LMA operator)") in str(excinfo.value)
    # And again but make the second argument a field this time
    call.arguments.args[1]._argument_type = "gh_field"
    with pytest.raises(GenerationError) as excinfo:
        _ = psy.gen
    assert ("Kernel enforce_operator_bc_code has 2 arguments when it "
            "should only have 1 (an LMA operator)") in str(excinfo.value)
Example #22
def get_invoke(algfile, api, idx=None, name=None, dist_mem=None):
    '''
    Utility method to get the idx'th or named invoke from the algorithm
    in the specified file.

    :param str algfile: name of the Algorithm source file (Fortran).
    :param str api: which PSyclone API this Algorithm uses.
    :param int idx: the index of the invoke from the Algorithm to return
                    or None if name is specified.
    :param str name: the name of the required invoke or None if an index
                     is supplied.
    :param bool dist_mem: whether the psy instance should be created with or \
                          without distributed memory support.

    :returns: (psy object, invoke object)
    :rtype: 2-tuple containing :py:class:`psyclone.psyGen.PSy` and
            :py:class:`psyclone.psyGen.Invoke` objects.
    :raises RuntimeError: if neither idx nor name is supplied, or if
                          both are supplied.
    :raises RuntimeError: if the supplied name does not match an invoke in
                          the Algorithm.
    '''

    if (idx is None and not name) or (idx is not None and name):
        raise RuntimeError("Either the index or the name of the "
                           "requested invoke must be specified")

    _, info = parse(os.path.join(get_base_path(api), algfile), api=api)
    psy = PSyFactory(api, distributed_memory=dist_mem).create(info)
    if name:
        invoke = psy.invokes.get(name)
    else:
        invoke = psy.invokes.invoke_list[idx]
    return psy, invoke
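
# Usage sketch for the helper above; the algorithm file name and API string
# are illustrative assumptions borrowed from the other examples in this
# listing, not part of the original helper.
psy, invoke = get_invoke("1_single_invoke.f90", "dynamo0.3", idx=0,
                         dist_mem=False)
schedule = invoke.schedule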
Example #23
def test_omp_explicit_gen():
    ''' Check code generation for a single explicit loop containing
    a kernel. '''
    _, invoke_info = parse(os.path.join(BASE_PATH, "explicit_do.f90"),
                           api=API,
                           line_length=False)
    psy = PSyFactory(API, distributed_memory=False).create(invoke_info)
    schedule = psy.invokes.get('explicit_do').schedule
    omp_trans = TransInfo().get_trans_name('OMPParallelLoopTrans')

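    # Parallelise only the outermost loops (over vertical levels) that
    # contain a kernel.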
    for loop in schedule.loops():
        kernel = loop.kernel
        if kernel and loop.loop_type == "levels":
            schedule, _ = omp_trans.apply(loop)
    gen_code = str(psy.gen).lower()
    expected = ("program explicit_do\n"
                "  implicit none\n"
                "  integer :: ji, jj, jk\n"
                "  integer :: jpi, jpj, jpk\n"
                "  real, dimension(jpi, jpj, jpk) :: umask\n"
                "  !$omp parallel do default(shared), private(ji,jj,jk), "
                "schedule(static)\n"
                "  do jk = 1, jpk\n"
                "    do jj = 1, jpj\n"
                "      do ji = 1, jpi\n"
                "        umask(ji, jj, jk) = ji * jj * jk / r\n"
                "      end do\n"
                "    end do\n"
                "  end do\n"
                "  !$omp end parallel do\n"
                "end program explicit_do")
    assert expected in gen_code
    # Check that calling gen a second time gives the same code
    gen_code = str(psy.gen).lower()
    assert expected in gen_code
Example #24
def test_scalar(monkeypatch):
    '''Test that the KernelInterface class scalar method adds the expected
    class to the symbol table and the _arglist list. Also check that
    it raises the expected exception if the scalar type is not
    recognised.

    '''
    kernel_interface = KernelInterface(None)
    _, invoke_info = parse(os.path.join(
        BASE_PATH, "1.6.1_single_invoke_1_int_scalar.f90"),
                           api="dynamo0.3")
    psy = PSyFactory("dynamo0.3", distributed_memory=False).create(invoke_info)
    schedule = psy.invokes.invoke_list[0].schedule
    kernel = schedule[0].loop_body[0]
    scalar_arg = kernel.args[1]
    kernel_interface = KernelInterface(None)
    kernel_interface.scalar(scalar_arg)
    symbol = kernel_interface._symbol_table.lookup(scalar_arg.name)
    assert isinstance(symbol, lfric_psyir.LfricIntegerScalarDataSymbol)
    assert isinstance(symbol.interface, ArgumentInterface)
    assert (symbol.interface.access == INTENT_MAPPING[scalar_arg.intent])
    assert kernel_interface._arglist[-1] is symbol
    # Force an error
    monkeypatch.setattr(scalar_arg, "_intrinsic_type", "invalid")
    with pytest.raises(NotImplementedError) as info:
        kernel_interface.scalar(scalar_arg)
    assert ("scalar of type 'invalid' not implemented in KernelInterface "
            "class." in str(info.value))
Example #25
def test_globalstoargumentstrans_unsupported_gocean_scalar(monkeypatch):
    ''' Check that the GlobalsToArguments transformation raises an error
    when the global is of a type not supported by the GOcean
    infrastructure. '''

    trans = KernelGlobalsToArguments()

    # Construct a testing InvokeSchedule
    _, invoke_info = parse(os.path.join(BASEPATH, "gocean1p0",
                                        "single_invoke_kern_with_use.f90"),
                           api=API)
    psy = PSyFactory(API).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    kernel = invoke.schedule.coded_kernels()[0]

    # In this case we set it to be of type CHARACTER as that is not supported
    # in the GOcean infrastructure.
    def create_data_symbol(arg):
        symbol = DataSymbol(arg.name, CHARACTER_TYPE, interface=arg.interface)
        return symbol

    monkeypatch.setattr(Symbol, "resolve_deferred", create_data_symbol)

    # Test transforming a single kernel
    with pytest.raises(TypeError) as err:
        trans.apply(kernel)
    assert ("The global variable 'rdt' could not be promoted to an argument "
            "because the GOcean infrastructure does not have any scalar type "
            "equivalent to the PSyIR Scalar<CHARACTER, UNDEFINED> type."
            in str(err.value))
Example #26
def test_create_basis_errors(monkeypatch):
    '''Check that the appropriate exceptions are raised when a) an
    evaluator shape is provided, as they are not yet supported, and b)
    an unrecognised quadrature or evaluator shape is found.

    '''
    _, invoke_info = parse(os.path.join(BASE_PATH, "6.1_eval_invoke.f90"),
                           api="dynamo0.3")
    psy = PSyFactory("dynamo0.3", distributed_memory=False).create(invoke_info)
    schedule = psy.invokes.invoke_list[0].schedule
    kernel = schedule[0].loop_body[0]
    kernel_interface = KernelInterface(kernel)

    # "w1" requires a basis function and is the first entry in the
    # unique function spaces list
    w1_fs = kernel.arguments.unique_fss[0]
    # Evaluator shapes are not yet supported.
    with pytest.raises(NotImplementedError) as info:
        kernel_interface.basis(w1_fs)
    assert ("Evaluator shapes not implemented in kernel_interface class."
            in str(info.value))
    # Force an unsupported shape
    monkeypatch.setattr(kernel, "_eval_shapes", ["invalid_shape"])
    with pytest.raises(InternalError) as info:
        kernel_interface.basis(w1_fs)
    assert (
        "Unrecognised quadrature or evaluator shape 'invalid_shape'. "
        "Expected one of: ['gh_quadrature_xyoz', 'gh_quadrature_face', "
        "'gh_quadrature_edge', 'gh_evaluator']." in str(info.value))
Example #27
def test_prolong_vector(tmpdir):
    ''' Check that we generate correct code when an inter-grid kernel
    takes a field vector as argument '''
    _, invoke_info = parse(os.path.join(BASE_PATH,
                                        "22.4_intergrid_prolong_vec.f90"),
                           api=API)
    psy = PSyFactory(API, distributed_memory=True).create(invoke_info)
    output = str(psy.gen)

    assert LFRicBuild(tmpdir).code_compiles(psy)

    assert "TYPE(field_type), intent(in) :: field1(3)" in output
    assert "TYPE(field_proxy_type) field1_proxy(3)" in output
    # Make sure we always index into the field arrays
    assert " field1%" not in output
    assert " field2%" not in output
    assert ("ncpc_field1_field2, ncell_field1, field1_proxy(1)%data, "
            "field1_proxy(2)%data, field1_proxy(3)%data, field2_proxy(1)%data,"
            " field2_proxy(2)%data, field2_proxy(3)%data, ndf_w1" in output)
    for idx in [1, 2, 3]:
        assert ("      IF (field2_proxy({0})%is_dirty(depth=1)) THEN\n"
                "        CALL field2_proxy({0})%halo_exchange(depth=1)\n"
                "      END IF\n".format(idx) in output)
        assert ("field1_proxy({0}) = field1({0})%get_proxy()".format(idx)
                in output)
        assert "CALL field1_proxy({0})%set_dirty()".format(idx) in output
        assert "CALL field1_proxy({0})%set_clean(1)".format(idx) in output
Example #28
def test_script_trans():
    ''' Checks that generator.py works correctly when a
        transformation is provided as a script, i.e. it applies the
        transformations correctly. We use loop fusion as an
        example.

    '''
    root_path = os.path.dirname(os.path.abspath(__file__))
    base_path = os.path.join(root_path, "test_files", "dynamo0p3")
    # First loop fuse explicitly (without using generator.py)
    parse_file = os.path.join(base_path, "4_multikernel_invokes.f90")
    _, invoke_info = parse(parse_file, api="dynamo0.3")
    psy = PSyFactory("dynamo0.3", distributed_memory=True).create(invoke_info)
    invoke = psy.invokes.get("invoke_0")
    schedule = invoke.schedule
    loop1 = schedule.children[4]
    loop2 = schedule.children[5]
    trans = LoopFuseTrans()
    trans.apply(loop1, loop2)
    generated_code_1 = psy.gen
    # Second loop fuse using generator.py and a script
    _, generated_code_2 = generate(parse_file,
                                   api="dynamo0.3",
                                   script_name=os.path.join(
                                       base_path, "loop_fuse_trans.py"))
    # remove module so we do not affect any following tests
    delete_module("loop_fuse_trans")
    # third - check that the results are the same ...
    assert str(generated_code_1) == str(generated_code_2)
Example #29
def test_writetoread_dag(tmpdir, have_graphviz):
    ''' Test that the GOInvokeSchedule::dag() method works as expected when we
    have two kernels with a write -> read dependency '''
    _, invoke_info = parse(os.path.join(BASE_PATH,
                                        "single_invoke_write_to_read.f90"),
                           api=API)
    psy = PSyFactory(API).create(invoke_info)
    invoke = psy.invokes.invoke_list[0]
    old_cwd = tmpdir.chdir()
    invoke.schedule.dag()
    if have_graphviz:
        dot_file = os.path.join(str(tmpdir), "dag")
        assert os.path.isfile(dot_file)
        with open(dot_file, "r") as dfile:
            dot = dfile.read()
        assert dot.startswith("digraph")
        # write -> read means that the second loop can only begin once the
        # first loop is complete. Check that we have the correct forwards
        # dependence (green) and backwards dependence (red).
        assert ('"loop_[outer]_1_end" -> "loop_[outer]_12_start" [color=red]'
                in dot or '"loop_[outer]_1_end" -> "loop_[outer]_12_start" '
                '[color=#ff0000]' in dot)
        assert ('"loop_[outer]_1_end" -> "loop_[outer]_12_start" [color=green]'
                in dot or '"loop_[outer]_1_end" -> "loop_[outer]_12_start" '
                '[color=#00ff00]' in dot)
    old_cwd.chdir()
Example #30
def test_operator_orientation(tmpdir):
    ''' Tests that an operator requiring orientation information is
    implemented correctly in the PSy layer. '''
    _, invoke_info = parse(os.path.join(BASE_PATH,
                                        "10.2_operator_orient.f90"),
                           api=TEST_API)
    psy = PSyFactory(TEST_API, distributed_memory=True).create(invoke_info)
    gen_str = str(psy.gen)

    assert LFRicBuild(tmpdir).code_compiles(psy)

    assert (
        "SUBROUTINE invoke_0_testkern_operator_orient_type(mm_w1, coord, qr)"
        in gen_str)
    assert "TYPE(operator_type), intent(in) :: mm_w1" in gen_str
    assert "TYPE(operator_proxy_type) mm_w1_proxy" in gen_str
    assert "mm_w1_proxy = mm_w1%get_proxy()" in gen_str
    assert (
        "orientation_w1 => mm_w1_proxy%fs_from%get_cell_orientation"
        "(cell)" in gen_str)
    assert ("CALL testkern_operator_orient_code(cell, nlayers, "
            "mm_w1_proxy%ncell_3d, mm_w1_proxy%local_stencil, "
            "coord_proxy(1)%data, coord_proxy(2)%data, coord_proxy(3)%data, "
            "ndf_w1, basis_w1_qr, orientation_w1, ndf_w0, undf_w0, "
            "map_w0(:,cell), diff_basis_w0_qr, np_xy_qr, np_z_qr, "
            "weights_xy_qr, weights_z_qr)" in gen_str)