Example #1
def test_tree_node_children():
    lp_opt = _dummy_opts()
    # create mapstore
    c = arc.creator('c',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(3, 13, dtype=arc.kint_type))
    mstore = arc.MapStore(lp_opt, c, True, 'i')

    # add children
    c2 = arc.creator('c2',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.arange(10, dtype=arc.kint_type))
    x = __create_var('x')
    mstore.check_and_add_transform(x, c2, 'i')
    c3 = arc.creator('c3',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.array(list(range(4)) + list(range(6, 12)),
                                          dtype=arc.kint_type))
    x2 = __create_var('x2')
    mstore.check_and_add_transform(x2, c3, 'i')
    mstore.finalize()

    # check children
    assert mstore.tree.has_children([x, x2]) == [False, False]
    assert mstore.domain_to_nodes[c2].has_children([x, x2]) == [True, False]
    assert mstore.domain_to_nodes[c3].has_children([x, x2]) == [False, True]

    # and finally check the tree search
    x3 = __create_var('x3')
    assert arc.search_tree(mstore.tree.parent, [x, x2, x3]) == [
        mstore.domain_to_nodes[c2], mstore.domain_to_nodes[c3], None
    ]
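Note: these snippets rely on two test helpers, _dummy_opts() and __create_var(), that are not shown in this listing. A minimal sketch of what they might look like, inferred purely from how they are used here; the actual helpers in pyJac's test suite may differ:

# hypothetical stand-ins inferred from usage in the examples; not pyJac's real helpers
def _dummy_opts():
    # mimic the handful of loopy_options attributes the MapStore machinery reads
    return type('', (object, ), {'order': 'C', 'depth': None, 'width': None,
                                 'pre_split': False, 'is_simd': False,
                                 'lang': 'c'})()

def __create_var(name):
    # a plain creator with no initializer, suitable for attaching to a mapped domain
    return arc.creator(name, arc.kint_type, (10, ), 'C')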
Example #2
def test_offset_base():
    lp_opt = _dummy_opts()
    c = arc.creator('',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(3, 13, dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')
    assert len(mstore.transformed_domains) == 0

    # add a variable
    c2 = arc.creator('',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.array(list(range(4)) + list(range(5, 11)),
                                          dtype=arc.kint_type))
    x = __create_var('x')
    mstore.check_and_add_transform(x, c2, 'i')
    mstore.finalize()

    assert len(mstore.transformed_domains) == 2
    assert np.array_equal(mstore.map_domain.initializer,
                          np.arange(10, dtype=arc.kint_type))
    assert mstore.domain_to_nodes[c2] in mstore.transformed_domains
    assert mstore.domain_to_nodes[x].parent == mstore.domain_to_nodes[c2]
Example #3
def test_absolute_root():
    lp_opt = _dummy_opts()
    # create mapstore
    c = arc.creator('c',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(3, 13, dtype=arc.kint_type))
    mstore = arc.MapStore(lp_opt, c, True, 'i')

    # add children
    c2 = arc.creator('c2',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.arange(10, dtype=arc.kint_type))
    x = __create_var('x')
    mstore.check_and_add_transform(x, c2, 'i')

    assert mstore.absolute_root == mstore.domain_to_nodes[c] and \
        mstore.absolute_root.name == 'c'

    # force input map
    c3 = arc.creator('c3',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.array(list(range(4)) + list(range(6, 12)),
                                          dtype=arc.kint_type))
    x2 = __create_var('x2')
    mstore.check_and_add_transform(x2, c3, 'i')
    mstore.finalize()
    assert mstore.absolute_root != mstore.domain_to_nodes[c] and \
        mstore.absolute_root.name == 'c_map'
Example #4
def test_map_iname_domains():
    lp_opt = _dummy_opts()
    c = arc.creator('base',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(3, 13, dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')
    mstore.finalize()
    assert mstore.get_iname_domain() == ('i', '3 <= i <= 12')

    # add an affine map
    mstore = arc.MapStore(lp_opt, c, True, 'i')
    mapv = np.arange(10, dtype=arc.kint_type)
    var = arc.creator('var', arc.kint_type, (10, ), 'C')
    domain = arc.creator('domain',
                         arc.kint_type, (10, ),
                         'C',
                         initializer=mapv)
    mstore.check_and_add_transform(var, domain, 'i')
    mstore.finalize()
    assert mstore.get_iname_domain() == ('i', '3 <= i <= 12')

    # add a non-affine map, domain should bounce to 0-based
    mstore = arc.MapStore(lp_opt, c, True, 'i')
    mapv = np.array(list(range(3)) + list(range(4, 11)), dtype=arc.kint_type)
    var = arc.creator('var2', arc.kint_type, (10, ), 'C')
    domain = arc.creator('domain',
                         arc.kint_type, (10, ),
                         'C',
                         initializer=mapv)
    mstore.check_and_add_transform(var, domain, 'i')
    mstore.finalize()
    assert mstore.get_iname_domain() == ('i', '0 <= i <= 9')

    # check non-contiguous
    c = arc.creator('base',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.array(list(range(3)) + list(range(4, 11)),
                                         dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')
    mstore.finalize()
    assert mstore.get_iname_domain() == ('i', '0 <= i <= 9')
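Aside: the (iname, bounds) pair returned by get_iname_domain is presumably what the kernel generator expands into a loopy ISL-style domain string. A small self-contained illustration, assuming the _dummy_opts sketch from Example #1:

# illustrative only: rebuild the last mapstore above and format its iname domain
c = arc.creator('base', arc.kint_type, (10, ),
                'C',
                initializer=np.array(list(range(3)) + list(range(4, 11)),
                                     dtype=arc.kint_type))
mstore = arc.MapStore(_dummy_opts(), c, True, 'i')
mstore.finalize()
iname, bounds = mstore.get_iname_domain()
assert '{{[{0}]: {1}}}'.format(iname, bounds) == '{[i]: 0 <= i <= 9}'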
Example #5
def test_contiguous_input():

    # test that creation of mapstore with contiguous map has no effect
    lp_opt = _dummy_opts()
    c = arc.creator('',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(10, dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')
    assert len(mstore.transformed_domains) == 0
Example #6
def test_multiple_inputs():
    lp_opt = _dummy_opts()
    c = arc.creator('',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(10, dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')

    # add a variable
    c2 = arc.creator('',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.arange(10, dtype=arc.kint_type))
    mstore.check_and_add_transform(__create_var('x2'), c2, 'i')

    # add a mapped variable
    c3 = arc.creator('',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.array(list(range(5)) + list(range(6, 11)),
                                          dtype=arc.kint_type))
    mstore.check_and_add_transform(__create_var('x3'), c3, 'i')

    # test a different variable with the same map
    c4 = arc.creator('',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.array(list(range(5)) + list(range(6, 11)),
                                          dtype=arc.kint_type))
    mstore.check_and_add_transform(__create_var('x4'), c4, 'i')

    # add another mapped variable
    c5 = arc.creator('',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.array(list(range(4)) + list(range(5, 11)),
                                          dtype=arc.kint_type))
    mstore.check_and_add_transform(__create_var('x5'), c5, 'i')

    mstore.finalize()

    assert mstore.domain_to_nodes[c2] not in mstore.transformed_domains
    assert mstore.domain_to_nodes[c3] in mstore.transformed_domains
    assert mstore.domain_to_nodes[c4] in mstore.transformed_domains
    assert mstore.domain_to_nodes[c5] in mstore.transformed_domains

    assert len(mstore.transformed_domains) == 3
    assert np.array_equal(mstore.map_domain.initializer,
                          np.arange(10, dtype=arc.kint_type))
Example #7
    def test_input_private_memory_creations(self):
        lp_opt = _dummy_opts()
        rate_info = assign_rates(self.store.reacs, self.store.specs,
                                 RateSpecialization.fixed)
        # create name and mapstores
        nstore = arc.NameStore(lp_opt, rate_info, True, self.store.test_size)
        mstore = arc.MapStore(lp_opt, nstore.phi_inds, self.store.test_size,
                              'i')

        # create known input
        jac_lp, jac_str = mstore.apply_maps(nstore.jac, 'j', 'k', 'i')

        assert isinstance(jac_lp,
                          lp.ArrayArg) and jac_lp.shape == nstore.jac.shape
        assert jac_str == 'jac[j, k, i]'
Example #8
def test_non_contiguous_input():
    lp_opt = _dummy_opts()

    # test that creation of mapstore with non-contiguous map forces
    # generation of input map
    c = arc.creator('',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.array(list(range(4)) + list(range(6, 12)),
                                         dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')
    mstore.finalize()
    assert len(mstore.transformed_domains) == 1
    assert mstore.tree.parent is not None
    assert np.allclose(mstore.tree.parent.domain.initializer, np.arange(10))
Example #9
def test_affine_dict_with_input_map():
    lp_opt = _dummy_opts()

    # make a creator to form the base of the mapstore
    c1 = arc.creator('c1',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.array(list(range(4)) + list(range(6, 12)),
                                          dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c1, True, 'i')

    # create a variable
    x = __create_var('x')

    assert mstore.apply_maps(x, 'i', affine={'i': 1})[1] == 'x[i_0 + 1]'
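For contrast, a hedged sketch of the same affine keyword when the base domain is contiguous, so that no input map is forced (reusing the helper sketches from Example #1). Based on the contiguous-input behaviour shown in Examples #5 and #13, the offset should land directly on the base iname:

c = arc.creator('c', arc.kint_type, (10, ),
                'C',
                initializer=np.arange(10, dtype=arc.kint_type))
mstore = arc.MapStore(_dummy_opts(), c, True, 'i')
x = __create_var('x')
# expected to yield 'x[i + 1]', since no transform instruction is needed here
assert mstore.apply_maps(x, 'i', affine={'i': 1})[1] == 'x[i + 1]'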
Example #10
def test_working_buffer_creations():
    for lp_opt in opts_loop():

        def __shape_compare(shape1, shape2):
            for s1, s2 in zip(*(shape1, shape2)):
                assert str(s1) == str(s2)
            return True

        # make a creator to form the base of the mapstore
        c = arc.creator('',
                        arc.kint_type, (10, ),
                        lp_opt.order,
                        initializer=np.arange(10, dtype=arc.kint_type))

        # and the array to test
        arr = arc.creator('a', arc.kint_type, (10, 10), lp_opt.order)

        # and a final "input" array
        inp = arc.creator('b', arc.kint_type, (10, 10), lp_opt.order)

        mstore = arc.MapStore(lp_opt, c, 8192, 'i')
        arr_lp, arr_str = mstore.apply_maps(
            arr,
            'j',
            'i',
            reshape_to_working_buffer=arc.work_size.name,
            working_buffer_index='k')

        assert isinstance(arr_lp, lp.ArrayArg) and \
            __shape_compare(arr_lp.shape, (arc.work_size.name, 10))
        assert arr_str == ('a[k, i]' if lp_opt.pre_split else 'a[j, i]')

        inp_lp, inp_str = mstore.apply_maps(inp,
                                            'j',
                                            'i',
                                            reshape_to_working_buffer=False,
                                            working_buffer_index=None)
        assert isinstance(inp_lp, lp.ArrayArg) and __shape_compare(
            inp_lp.shape, (10, 10))
        assert inp_str == 'b[j, i]'

        # now test input without the global index
        arr_lp, arr_str = mstore.apply_maps(arr, 'k', 'i')
        assert isinstance(arr_lp, lp.ArrayArg) and __shape_compare(
            arr_lp.shape, (10, 10))
        assert arr_str == 'a[k, i]'
Example #11
def test_input_map_pickup():
    lp_opt = _dummy_opts()

    # test that creation of mapstore with non-contiguous map forces
    # non-transformed variables to pick up the right iname
    c = arc.creator('',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.array(list(range(4)) + list(range(6, 12)),
                                         dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')

    # create a variable
    x = __create_var('x')
    _, x_str = mstore.apply_maps(x, 'i')

    assert 'i_0' in x_str
Example #12
            def _get_kernel_gens():
                # two kernels (one for each generator)
                instructions0 = ("""
                        {arg} = 1
                    """)
                instructions1 = ("""
                        {arg} = 2
                    """)

                # create mapstore
                domain = arc.creator('domain',
                                     arc.kint_type, (10, ),
                                     'C',
                                     initializer=np.arange(
                                         10, dtype=arc.kint_type))
                mapstore = arc.MapStore(opts, domain, None)
                # create global arg
                arg = arc.creator('arg', np.float64,
                                  (arc.problem_size.name, 10), opts.order)
                # create array / array string
                arg_lp, arg_str = mapstore.apply_maps(arg, 'j', 'i')

                # create kernel infos
                knl0 = knl_info('knl0',
                                instructions0.format(arg=arg_str),
                                mapstore,
                                kernel_data=[arg_lp, arc.work_size])
                knl1 = knl_info('knl1',
                                instructions1.format(arg=arg_str),
                                mapstore,
                                kernel_data=[arg_lp, arc.work_size])
                # create generators
                gen0 = make_kernel_generator(opts,
                                             KernelType.dummy, [knl0],
                                             type('', (object, ), {'jac': ''}),
                                             name=knl0.name,
                                             output_arrays=['arg'])
                gen1 = make_kernel_generator(opts,
                                             KernelType.dummy, [knl0, knl1],
                                             type('', (object, ), {'jac': ''}),
                                             depends_on=[gen0],
                                             name=knl1.name,
                                             output_arrays=['arg'])
                return gen0, gen1
Example #13
def test_force_inline():
    lp_opt = _dummy_opts()
    mapv = np.arange(0, 5, dtype=arc.kint_type)
    c = arc.creator('base', arc.kint_type, mapv.shape, 'C', initializer=mapv)

    mstore = arc.MapStore(lp_opt, c, True, 'i')

    # add an affine map
    mapv = np.array(mapv, copy=True) + 1
    var = arc.creator('var', arc.kint_type, mapv.shape, 'C')
    domain = arc.creator('domain',
                         arc.kint_type,
                         mapv.shape,
                         'C',
                         initializer=mapv)
    mstore.check_and_add_transform(var, domain, 'i')
    _, var_str = mstore.apply_maps(var, 'i')
    assert var_str == 'var[i + 1]'
    assert len(mstore.transform_insns) == 0
Example #14
def test_input_map_domain_transfer():
    # check that a domain on the tree that matches the input map gets
    # transferred to the input map

    lp_opt = _dummy_opts()
    c = arc.creator('c',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(3, 13, dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')

    # add a creator that matches the coming input map
    c2 = arc.creator('c2',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.arange(10, dtype=arc.kint_type))
    x = __create_var('x')
    mstore.check_and_add_transform(x, c2, 'i')

    # and another creator that forces the input map
    c3 = arc.creator('c3',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.array(list(range(4)) + list(range(6, 12)),
                                          dtype=arc.kint_type))
    x2 = __create_var('x2')
    mstore.check_and_add_transform(x2, c3, 'i')
    mstore.finalize()

    # test that c2 isn't transformed, and resides on new base
    assert len(mstore.transformed_domains) == 2
    assert mstore.domain_to_nodes[c2] not in mstore.transformed_domains
    assert mstore.domain_to_nodes[c2].parent == mstore.tree.parent
    assert mstore.domain_to_nodes[c2].insn is None
    # check that the non-affine mapping is in there
    assert mstore.domain_to_nodes[c3] in mstore.transformed_domains
    # and the original base
    assert mstore.domain_to_nodes[c] in mstore.transformed_domains
Example #15
def test_contiguous_offset_input_map():
    # same as the above, but check that a non-affine mappable transform
    # results in an input map

    lp_opt = _dummy_opts()
    c = arc.creator('c',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(3, 13, dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')

    # add a creator that can be mapped affinely
    c2 = arc.creator('c2',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.arange(10, dtype=arc.kint_type))
    x = __create_var('x')
    mstore.check_and_add_transform(x, c2, 'i')

    # and another creator that can't be affinely mapped
    c3 = arc.creator('c3',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.array(list(range(4)) + list(range(6, 12)),
                                          dtype=arc.kint_type))
    x2 = __create_var('x2')
    mstore.check_and_add_transform(x2, c3, 'i')
    mstore.finalize()

    # test affine mapping is not transformed (should be moved to input map)
    assert len(mstore.transformed_domains) == 2
    assert mstore.domain_to_nodes[x] not in mstore.transformed_domains
    # check that the non-affine and original indices are in there
    assert mstore.domain_to_nodes[c3] in mstore.transformed_domains
    assert mstore.domain_to_nodes[x2].parent.domain == c3
    # and that the tree has been transformed
    assert mstore.tree in mstore.transformed_domains
Example #16
def test_contiguous_offset_input():
    lp_opt = _dummy_opts()
    c = arc.creator('c',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(3, 13, dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')

    # add a creator that can be mapped affinely
    c2 = arc.creator('c2',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.arange(10, dtype=arc.kint_type))
    x = __create_var('x')
    mstore.check_and_add_transform(x, c2, 'i')
    mstore.finalize()

    # test that the affine mapping is in there
    assert len(mstore.transformed_domains) == 1
    assert mstore.domain_to_nodes[c2] in mstore.transformed_domains
    assert mstore.domain_to_nodes[x].parent.domain == c2
    assert mstore.domain_to_nodes[x].iname == 'i + -3'
Example #17
def test_map_to_larger():
    lp_opt = _dummy_opts()
    c = arc.creator('base',
                    arc.kint_type, (5, ),
                    'C',
                    initializer=np.arange(5, dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')
    assert len(mstore.transformed_domains) == 0

    # add a variable
    var = arc.creator('var', arc.kint_type, (10, ), 'C')
    domain = arc.creator('domain',
                         arc.kint_type, (10, ),
                         'C',
                         initializer=np.arange(10, dtype=arc.kint_type))
    # this should work
    mstore.check_and_add_transform(var, domain, 'i')
    var, var_str = mstore.apply_maps(var, 'i')

    assert isinstance(var, lp.ArrayArg)
    assert var_str == 'var[i_0]'
    assert '<> i_0 = domain[i] {id=index_i_0}' in mstore.transform_insns
Example #18
def test_bad_multiple_variable_map():
    lp_opt = _dummy_opts()
    c = arc.creator('',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(10, dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')

    # add a variable
    c2 = arc.creator('',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.arange(10, dtype=arc.kint_type))
    x2 = __create_var('x2')
    mstore.check_and_add_transform(x2, c2, 'i')

    c3 = arc.creator('',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.arange(3, 13, dtype=arc.kint_type))
    # add the same variable with a different domain, and check for the error
    with assert_raises(AssertionError):
        mstore.check_and_add_transform(x2, c3, 'i')
Example #19
def test_map_variable_creator():
    lp_opt = _dummy_opts()
    c = arc.creator('base',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(3, 13, dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')
    assert len(mstore.transformed_domains) == 0

    # add a variable
    var = arc.creator('var', arc.kint_type, (10, ), 'C')
    domain = arc.creator('domain',
                         arc.kint_type, (10, ),
                         'C',
                         initializer=np.array(list(range(4)) +
                                              list(range(5, 11)),
                                              dtype=arc.kint_type))
    mstore.check_and_add_transform(var, domain, 'i')
    var, var_str = mstore.apply_maps(var, 'i')

    assert isinstance(var, lp.ArrayArg)
    assert var_str == 'var[i_1]'
    assert '<> i_1 = domain[i + 3] {id=index_i_1}' in mstore.transform_insns
Example #20
def test_leaf_inames():
    lp_opt = _dummy_opts()

    c = arc.creator('base',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(10, dtype=arc.kint_type))
    mstore = arc.MapStore(lp_opt, c, True, 'i')

    # create one map
    mapv = np.array(list(range(3)) + list(range(4, 11)), dtype=arc.kint_type)
    mapv2 = np.array(list(range(2)) + list(range(3, 11)), dtype=arc.kint_type)
    domain2 = arc.creator('domain2',
                          arc.kint_type, (10, ),
                          'C',
                          initializer=mapv2)
    domain = arc.creator('domain',
                         arc.kint_type, (10, ),
                         'C',
                         initializer=mapv)
    mstore.check_and_add_transform(domain2, domain, 'i')

    # and another
    var = arc.creator('var', arc.kint_type, (10, ), 'C')
    mstore.check_and_add_transform(var, domain2, 'i')

    # now create var
    _, d_str = mstore.apply_maps(domain, 'i')
    _, d2_str = mstore.apply_maps(domain2, 'i')
    _, v_str = mstore.apply_maps(var, 'i')

    assert d_str == 'domain[i]'
    assert d2_str == 'domain2[i_0]'
    assert v_str == 'var[i_1]'
    assert '<> i_0 = domain[i] {id=index_i_0}' in mstore.transform_insns
    assert '<> i_1 = domain2[i_0] {id=index_i_1}' in mstore.transform_insns
Example #21
    def test_read_initial_condition_generator(self):
        oploop = OptionLoopWrapper.from_get_oploop(self,
                                                   do_conp=False,
                                                   do_vector=True,
                                                   do_sparse=False)
        for opts in oploop:
            # two kernels (one for each generator)
            spec_insns = ("""
                    {spec} = {param} {{id=0}}
                """)
            param_insns = ("""
                    {param} = 1 {{id=1}}
                """)
            # create mapstore
            domain = arc.creator('domain',
                                 arc.kint_type, (10, ),
                                 'C',
                                 initializer=np.arange(10,
                                                       dtype=arc.kint_type))
            mapstore = arc.MapStore(opts, domain, None)
            # create global args
            param = arc.creator(arc.pressure_array, np.float64,
                                (arc.problem_size.name, 10), opts.order)
            spec = arc.creator('longanddistinct', np.float64,
                               (arc.problem_size.name, 10), opts.order)
            namestore = type('', (object, ), {
                'param': param,
                'spec': spec,
                'jac': ''
            })
            # create array / array strings
            param_lp, param_str = mapstore.apply_maps(param, 'j', 'i')
            spec_lp, spec_str = mapstore.apply_maps(spec, 'j', 'i')

            # create kernel infos
            spec_info = knl_info(
                'spec_eval',
                spec_insns.format(param=param_str, spec=spec_str),
                mapstore,
                kernel_data=[spec_lp, param_lp, arc.work_size],
                silenced_warnings=['write_race(0)'])
            param_info = knl_info('param_eval',
                                  param_insns.format(param=param_str),
                                  mapstore,
                                  kernel_data=[param_lp, arc.work_size],
                                  silenced_warnings=['write_race(1)'])
            # create generators
            param_gen = make_kernel_generator(opts,
                                              KernelType.chem_utils,
                                              [param_info],
                                              namestore,
                                              output_arrays=[param.name])
            spec_gen = make_kernel_generator(
                opts,
                KernelType.species_rates, [spec_info],
                namestore,
                depends_on=[param_gen],
                input_arrays=[spec.name, param.name],
                output_arrays=[spec.name])

            # get the record
            with temporary_directory() as tdir:
                spec_gen._make_kernels()
                _, record, _ = spec_gen._generate_wrapping_kernel(tdir)

                # and call the read IC gen
                spec_gen._generate_common(tdir, record)

                # read in header
                with open(
                        os.path.join(
                            tdir,
                            'read_initial_conditions' + header_ext[opts.lang]),
                        'r') as file:
                    file = file.read()
                assert 'double* longanddistinct' in file

                # read in source
                with open(
                        os.path.join(
                            tdir,
                            'read_initial_conditions' + file_ext[opts.lang]),
                        'r') as file:
                    file = file.read()
                assert 'double* longanddistinct' in file
Example #22
def mass_to_mole_factions(loopy_opts, namestore, conp=True, test_size=None):
    """Converts input state vector from mass fractions to mole fractions and state
       variables depending on constant pressure vs constant volue assumption

    Parameters
    ----------
    loopy_opts : `loopy_options` object
        An object containing all the loopy options to execute
    namestore : :class:`array_creator.NameStore`
        The namestore / creator for this method
    conp : bool
        If true, generate equations using constant pressure assumption
        If false, use constant volume equations
    test_size : int
        If not None, this kernel is being used for testing.
        Hence we need to size the arrays accordingly

    Notes
    -----
    Assumes that this is being called at input only!
    This allows us to make the (generally) unsafe assumption that the mole fractions
    are _equivalent_ to the moles, as the total number of moles will adjust to
    satisfy the ideal gas relation.


    Returns
    -------
    knl_list : list of :class:`knl_info`
        The generated infos for feeding into the kernel generator for both
        equation types
    """

    # first kernel, determine molecular weight
    mapstore = arc.MapStore(loopy_opts, namestore.num_specs_no_ns, test_size)

    # first, create all arrays
    kernel_data = []

    # add problem size
    kernel_data.extend(arc.initial_condition_dimension_vars(loopy_opts, test_size))

    # need "Yi" and molecular weight / factor arrays

    # add / apply maps
    mapstore.check_and_add_transform(namestore.n_arr,
                                     namestore.phi_spec_inds,
                                     force_inline=True)
    mapstore.check_and_add_transform(namestore.mw_post_arr,
                                     namestore.num_specs_no_ns,
                                     force_inline=True)

    Yi_arr, Yi_str = mapstore.apply_maps(namestore.n_arr, *default_inds)
    mw_inv_arr, mw_inv_str = mapstore.apply_maps(namestore.mw_inv, var_name)
    mw_work_arr, mw_work_str = mapstore.apply_maps(namestore.mw_work, global_ind)

    # add arrays
    kernel_data.extend([Yi_arr, mw_inv_arr, mw_work_arr])

    # initialize molecular weight
    pre_instructions = Template(
        """
            ${mw_work_str} = W_ns_inv {id=init}
            <> work = 0 {id=init_work}
        """
    ).safe_substitute(**locals())

    instructions = Template(
        """
            work = work + (${mw_inv_str} - W_ns_inv) * ${Yi_str} \
                {id=update, dep=init*}
        """).safe_substitute(**locals())

    barrier = ic.get_barrier(loopy_opts, local_memory=False,
                             id='break', dep='update')
    post_instructions = Template(
        """
        ${barrier}
        ${mw_work_str} = (${mw_work_str} + work) {id=final, dep=break, nosync=init}
        """).substitute(**locals())

    can_vectorize, vec_spec = ic.get_deep_specializer(
        loopy_opts, atomic_ids=['final'], init_ids=['init'])

    mw_kernel = k_gen.knl_info(name='molecular_weight_inverse',
                               pre_instructions=[pre_instructions],
                               instructions=instructions,
                               post_instructions=[post_instructions],
                               mapstore=mapstore,
                               var_name=var_name,
                               kernel_data=kernel_data,
                               can_vectorize=can_vectorize,
                               vectorization_specializer=vec_spec,
                               parameters={'W_ns_inv': 1. / np.float64(
                                                namestore.mw_arr[-1])},
                               silenced_warnings=['write_race(final)',
                                                  'write_race(init)'])

    # now convert to moles
    mapstore = arc.MapStore(loopy_opts, namestore.num_specs_no_ns, test_size)

    # first, create all arrays
    kernel_data = []

    # add problem size
    kernel_data.extend(arc.initial_condition_dimension_vars(loopy_opts, test_size))

    # need input "Yi", molecular weight, and moles array

    # add / apply maps
    mapstore.check_and_add_transform(namestore.n_arr,
                                     namestore.phi_spec_inds,
                                     force_inline=True)

    n_arr, n_str = mapstore.apply_maps(namestore.n_arr, *default_inds)
    mw_work_arr, mw_work_str = mapstore.apply_maps(namestore.mw_work, global_ind)
    mw_inv_arr, mw_inv_str = mapstore.apply_maps(namestore.mw_inv, var_name)

    # add arrays
    kernel_data.extend([n_arr, mw_inv_arr, mw_work_arr])

    pre_instructions = Template(
        '<> mw = 1 / ${mw_work_str} {id=init}').safe_substitute(**locals())
    instructions = Template(
        """
            ${n_str} = ${n_str} * ${mw_inv_str} * mw {dep=init}
        """).safe_substitute(**locals())

    can_vectorize, vec_spec = ic.get_deep_specializer(loopy_opts)
    mf_kernel = k_gen.knl_info(name='mole_fraction',
                               pre_instructions=[pre_instructions],
                               instructions=instructions,
                               mapstore=mapstore,
                               var_name=var_name,
                               kernel_data=kernel_data,
                               can_vectorize=can_vectorize,
                               vectorization_specializer=vec_spec)

    return [mw_kernel, mf_kernel]
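For reference, the two kernels above implement the standard mass-to-mole-fraction conversion: 1/W_mix = sum_j(Y_j / W_j), followed by X_i = (Y_i / W_i) * W_mix. A plain NumPy sketch of the same arithmetic (illustrative names only, not pyJac API):

import numpy as np

def mass_to_mole_fractions_reference(Y, W):
    # Y: species mass fractions (summing to 1), W: species molecular weights
    mw_mix_inv = np.sum(Y / W)    # what the molecular_weight_inverse kernel accumulates
    return (Y / W) / mw_mix_inv   # what the mole_fraction kernel writes back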
Example #23
    def create_interior_kernel(for_input):
        shape = __check(for_input)
        name = 'copy_{}'.format('in' if for_input else 'out')
        # get arrays
        arrs = [arrays[x] for x in (inputs if for_input else outputs)]

        # create a dummy map and store
        map_shape = np.arange(shape[1], dtype=arc.kint_type)
        mapper = arc.creator(name, arc.kint_type, map_shape.shape, 'C',
                             initializer=map_shape)
        mapstore = arc.MapStore(loopy_opts, mapper, test_size)

        # determine what other inames we need, if any
        namer = UniqueNameGenerator(set([mapstore.iname]))
        extra_inames = []
        for i in six.moves.range(2, len(shape)):
            iname = namer(mapstore.iname)
            extra_inames.append((iname, '0 <= {} < {}'.format(
                iname, shape[i])))

        indicies = [arc.global_ind, mapstore.iname] + [
            ex[0] for ex in extra_inames]
        global_indicies = indicies[:]
        global_indicies[0] += ' + ' + driver_offset.name

        # bake in SIMD pre-split
        vec_spec = None
        split_spec = None
        conditional_index = get_problem_index(loopy_opts)

        def __build(arr, local, **kwargs):
            inds = global_indicies if not local else indicies
            if isinstance(arr, arc.jac_creator) and arr.is_sparse:
                # this is a sparse Jacobian, hence we have to override the default
                # indexing (as we're doing a straight copy)
                kwargs['ignore_lookups'] = True
            if arr_non_ic(arr):
                return mapstore.apply_maps(arr, *inds, **kwargs)
            else:
                return mapstore.apply_maps(arr, inds[0], **kwargs)

        # create working buffer version of arrays
        working_buffers = []
        working_strs = []
        for arr in arrs:
            arr_lp, arr_str = __build(arr, True, use_local_name=True)
            working_buffers.append(arr_lp)
            working_strs.append(arr_str)

        # create global versions of arrays
        buffers = []
        strs = []
        for arr in arrs:
            arr_lp, arr_str = __build(arr, False, reshape_to_working_buffer=False)
            buffers.append(arr_lp)
            strs.append(arr_str)

        # now create the instructions
        instruction_template = Template("""
            if ${ind} < ${problem_size} ${shape_check}
                ${local_buffer} = ${global_buffer} {id=copy_${name}}
            end
        """) if for_input else Template("""
            if ${ind} < ${problem_size} ${shape_check}
                ${global_buffer} = ${local_buffer} {id=copy_${name}}
            end
        """)

        warnings = []
        instructions = []
        for i, arr in enumerate(arrs):
            # get shape check
            shape_check = ''
            if arr.shape[-1] != shape[-1] and len(arr.shape) == len(shape):
                shape_check = ' and {} < {}'.format(
                    indicies[-1], arr.shape[-1])

            instructions.append(instruction_template.substitute(
                local_buffer=working_strs[i],
                global_buffer=strs[i],
                ind=conditional_index,
                problem_size=arc.problem_size.name,
                name=arr.name,
                shape_check=shape_check))
            warnings.append('write_race(copy_{})'.format(arr.name))
        if loopy_opts.is_simd:
            warnings.append('vectorize_failed')
            warnings.append('unrolled_vector_iname_conditional')
        instructions = '\n'.join(instructions)

        kwargs = {}
        if loopy_opts.lang == 'c':
            # override the number of copies in this function to 1
            # (i.e., 1 per-thread)
            kwargs['iname_domain_override'] = [(arc.global_ind, '0 <= {} < 1'.format(
                arc.global_ind))]

        priorities = ([arc.global_ind + '_outer'] if loopy_opts.pre_split else [
            arc.global_ind]) + [arc.var_name]
        # and return the kernel info
        return knl_info(name=name,
                        instructions=instructions,
                        mapstore=mapstore,
                        var_name=arc.var_name,
                        extra_inames=extra_inames,
                        kernel_data=buffers + working_buffers + [
                          arc.work_size, arc.problem_size, driver_offset],
                        silenced_warnings=warnings,
                        vectorization_specializer=vec_spec,
                        split_specializer=split_spec,
                        unrolled_vector=True,
                        loop_priority=set([tuple(priorities + [
                          iname[0] for iname in extra_inames])]),
                        **kwargs)
Example #24
def get_driver(loopy_opts, namestore, inputs, outputs, driven,
               test_size=None):
    """
    Implements a driver function for kernel evaluation.
    This allows pyJac to utilize a smaller working-buffer (sized to the
    global work size), and implements a static(like) scheduling algorithm

    Notes
    -----
    Currently Loopy doesn't have the machinery to enable native calling of other
    loopy kernels, so we have to fudge this a bit (and this can't be used for
    unit-tests).  Future versions will allow us to natively wrap test functions
    (i.e., once the new function calling interface is in place in Loopy)

    :see:`driver-function` for more information

    Parameters
    ----------
    loopy_opts: :class:`loopy_options`
        The loopy options specifying how to create this kernel
    namestore: :class:`NameStore`
        The namestore class that owns our arrays
    inputs: list of :class:`lp.KernelArgument`
        The arrays that should be copied into internal working buffers
        before calling subfunctions
    outputs: list of :class:`lp.KernelArgument`
        The arrays should be copied back into global memory after calling
        subfunctions
    driven: :class:`kernel_generator`
        The kernel generator to wrap in the driver

    Returns
    -------
    knl_list : list of :class:`knl_info`
        The generated infos for feeding into the kernel generator

    """

    # we have to do some shenanigans here to get this to work in loopy:
    #
    # 1. Loopy currently doesn't allow you to alter the for-loop increment size,
    #    so for OpenCL where we must increment by the global work size, we have to
    #    put a dummy for-loop in, and teach the kernel generator to work around it
    #
    # 2. Additionally, the OpenMP target in Loopy is Coming Soon (TM), hence we need
    #    our own dynamic scheduling preamble for the driver loop (
    #    if we're operating in queue-form)
    #
    # 3. Finally, Loopy is just now supporting the ability to natively call other
    #    kernels, so for the moment we still need to utilize the dummy function
    #    calling we have set-up for the finite difference Jacobian

    # first, get our input / output arrays
    arrays = {}
    to_find = set(listify(inputs)) | set(listify(outputs))
    # create mapping of array names
    array_names = {v.name: v for k, v in six.iteritems(vars(namestore))
                   if isinstance(v, arc.creator) and not (
                    v.fixed_indicies or v.affine)}
    for arr in to_find:
        arr_creator = next((array_names[x] for x in array_names if x == arr), None)
        if arr_creator is None:
            continue
        arrays[arr] = arr_creator

    if len(arrays) != len(to_find):
        missing = to_find - set(arrays.keys())
        logger = logging.getLogger(__name__)
        logger.debug('Input/output arrays for queue_driver kernel {} not found.'
                     .format(stringify_args(missing)))
        raise InvalidInputSpecificationException(missing)

    def arr_non_ic(array_input):
        return len(array_input.shape) > 1

    # ensure the inputs and outputs are all identically sized (among those that
    # have a non-initial-condition dimension)

    def __check(check_input):
        shape = ()

        def _raise(desc, inp, nameref, shape):
            logger = logging.getLogger(__name__)
            logger.debug('{} array for driver kernel {} does not '
                         'match expected shape (from array {}).  '
                         'Expected: ({}), got: ({})'.format(
                            desc, inp.name, nameref,
                            stringify_args(inp.shape),
                            stringify_args(shape))
                         )
            raise InvalidInputSpecificationException(inp.name)

        nameref = None
        desc = 'Input' if check_input else 'Output'
        for inp in [arrays[x] for x in (inputs if check_input else outputs)]:
            if not arr_non_ic(inp):
                # only the initial condition dimension, fine
                continue
            if shape:
                if inp.shape != shape and len(inp.shape) == len(shape):
                    # allow different shapes in the last index
                    if not all(x == y for x, y in zip(*(
                            inp.shape[:-1], shape[:-1]))):
                        _raise(desc, inp, nameref, shape)
                    # otherwise, take the maximum of the shape entry
                    shape = shape[:-1] + (max(shape[-1], inp.shape[-1]),)

                elif inp.shape != shape:
                    _raise(desc, inp, nameref, shape)
            else:
                nameref = inp.name
                shape = inp.shape[:]
        if not shape:
            logger = logging.getLogger(__name__)
            logger.debug('No {} arrays supplied to driver that require '
                         'copying to working buffer!'.format(desc))
            raise InvalidInputSpecificationException('Driver ' + desc + ' arrays')
        return shape

    def create_interior_kernel(for_input):
        shape = __check(for_input)
        name = 'copy_{}'.format('in' if for_input else 'out')
        # get arrays
        arrs = [arrays[x] for x in (inputs if for_input else outputs)]

        # create a dummy map and store
        map_shape = np.arange(shape[1], dtype=arc.kint_type)
        mapper = arc.creator(name, arc.kint_type, map_shape.shape, 'C',
                             initializer=map_shape)
        mapstore = arc.MapStore(loopy_opts, mapper, test_size)

        # determine what other inames we need, if any
        namer = UniqueNameGenerator(set([mapstore.iname]))
        extra_inames = []
        for i in six.moves.range(2, len(shape)):
            iname = namer(mapstore.iname)
            extra_inames.append((iname, '0 <= {} < {}'.format(
                iname, shape[i])))

        indicies = [arc.global_ind, mapstore.iname] + [
            ex[0] for ex in extra_inames]
        global_indicies = indicies[:]
        global_indicies[0] += ' + ' + driver_offset.name

        # bake in SIMD pre-split
        vec_spec = None
        split_spec = None
        conditional_index = get_problem_index(loopy_opts)

        def __build(arr, local, **kwargs):
            inds = global_indicies if not local else indicies
            if isinstance(arr, arc.jac_creator) and arr.is_sparse:
                # this is a sparse Jacobian, hence we have to override the default
                # indexing (as we're doing a straight copy)
                kwargs['ignore_lookups'] = True
            if arr_non_ic(arr):
                return mapstore.apply_maps(arr, *inds, **kwargs)
            else:
                return mapstore.apply_maps(arr, inds[0], **kwargs)

        # create working buffer version of arrays
        working_buffers = []
        working_strs = []
        for arr in arrs:
            arr_lp, arr_str = __build(arr, True, use_local_name=True)
            working_buffers.append(arr_lp)
            working_strs.append(arr_str)

        # create global versions of arrays
        buffers = []
        strs = []
        for arr in arrs:
            arr_lp, arr_str = __build(arr, False, reshape_to_working_buffer=False)
            buffers.append(arr_lp)
            strs.append(arr_str)

        # now create the instructions
        instruction_template = Template("""
            if ${ind} < ${problem_size} ${shape_check}
                ${local_buffer} = ${global_buffer} {id=copy_${name}}
            end
        """) if for_input else Template("""
            if ${ind} < ${problem_size} ${shape_check}
                ${global_buffer} = ${local_buffer} {id=copy_${name}}
            end
        """)

        warnings = []
        instructions = []
        for i, arr in enumerate(arrs):
            # get shape check
            shape_check = ''
            if arr.shape[-1] != shape[-1] and len(arr.shape) == len(shape):
                shape_check = ' and {} < {}'.format(
                    indicies[-1], arr.shape[-1])

            instructions.append(instruction_template.substitute(
                local_buffer=working_strs[i],
                global_buffer=strs[i],
                ind=conditional_index,
                problem_size=arc.problem_size.name,
                name=arr.name,
                shape_check=shape_check))
            warnings.append('write_race(copy_{})'.format(arr.name))
        if loopy_opts.is_simd:
            warnings.append('vectorize_failed')
            warnings.append('unrolled_vector_iname_conditional')
        instructions = '\n'.join(instructions)

        kwargs = {}
        if loopy_opts.lang == 'c':
            # override the number of copies in this function to 1
            # (i.e., 1 per-thread)
            kwargs['iname_domain_override'] = [(arc.global_ind, '0 <= {} < 1'.format(
                arc.global_ind))]

        priorities = ([arc.global_ind + '_outer'] if loopy_opts.pre_split else [
            arc.global_ind]) + [arc.var_name]
        # and return the kernel info
        return knl_info(name=name,
                        instructions=instructions,
                        mapstore=mapstore,
                        var_name=arc.var_name,
                        extra_inames=extra_inames,
                        kernel_data=buffers + working_buffers + [
                          arc.work_size, arc.problem_size, driver_offset],
                        silenced_warnings=warnings,
                        vectorization_specializer=vec_spec,
                        split_specializer=split_spec,
                        unrolled_vector=True,
                        loop_priority=set([tuple(priorities + [
                          iname[0] for iname in extra_inames])]),
                        **kwargs)

    copy_in = create_interior_kernel(True)
    # create a dummy kernel info that simply calls our internal function
    instructions = driven.name + '()'
    # create mapstore
    call_name = driven.name
    repeats = 1
    if loopy_opts.depth:
        # we need 'var_name' to have a non-unity size
        repeats = loopy_opts.vector_width

    map_shape = np.arange(repeats, dtype=arc.kint_type)
    mapper = arc.creator(call_name, arc.kint_type, map_shape.shape, 'C',
                         initializer=map_shape)
    mapstore = arc.MapStore(loopy_opts, mapper, test_size)
    mangler = lp_pregen.MangleGen(call_name, tuple(), tuple())
    kwargs = {}
    if loopy_opts.lang == 'c':
        # override the number of calls to the driven function in the driver, this
        # is currently fixed to 1 (i.e., 1 per-thread)
        kwargs['iname_domain_override'] = [(arc.global_ind, '0 <= {} < 1'.format(
            arc.global_ind))]

    func_call = knl_info(name='driver',
                         instructions=instructions,
                         mapstore=mapstore,
                         kernel_data=[arc.work_size, arc.problem_size],
                         var_name=arc.var_name,
                         extra_inames=copy_in.extra_inames[:],
                         manglers=[mangler],
                         **kwargs)
    copy_out = create_interior_kernel(False)

    # and return
    return [copy_in, func_call, copy_out]
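Conceptually, the three returned knl_info objects assemble into a driver of roughly the following shape. A runnable mock of that structure (illustrative only; the names mirror the pyJac symbols used above):

def mock_driver(problem_size, work_size, copy_in, driven, copy_out, driver_offset=0):
    # static(-like) scheduling: each block of initial conditions is staged through
    # a working buffer sized to the work size
    for j in range(driver_offset, problem_size, work_size):
        copy_in(j)    # gather this block of initial conditions into the working buffer
        driven()      # run the wrapped (driven) kernel on the working buffer
        copy_out(j)   # scatter the results back to the global output arrays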
Example #25
def test_duplicate_iname_detection():
    # ensures the same transform isn't picked up multiple times
    lp_opt = _dummy_opts()

    # create dummy map
    c = arc.creator('c',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(3, 13, dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')

    # create a mapped domain
    c2 = arc.creator('c',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.array(list(range(3)) + list(range(4, 11)),
                                          dtype=arc.kint_type))

    # add two variables to the same domain
    mstore.check_and_add_transform(__create_var('x'), c2)
    mstore.check_and_add_transform(__create_var('x2'), c2)

    mstore.finalize()

    # ensure there's only one transform insn issued
    assert len(mstore.transform_insns) == 1
    assert [x for x in mstore.transform_insns][0] == \
        mstore.domain_to_nodes[c2].insn

    # now repeat with the variables having initializers
    # to test that leaves aren't mapped
    lp_opt = _dummy_opts()

    # create dummy map
    c = arc.creator('c',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(3, 13, dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')

    # create a mapped domain
    c2 = arc.creator('c',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.array(list(range(3)) + list(range(4, 11)),
                                          dtype=arc.kint_type))

    # add two variables to the same domain
    x = __create_var('x')
    x.initializer = np.arange(10)
    x2 = __create_var('x2')
    x2.initializer = np.arange(10)
    mstore.check_and_add_transform(x, c2)
    mstore.check_and_add_transform(x2, c2)

    mstore.finalize()

    # ensure there's only one transform insn issued
    assert len(mstore.transform_insns) == 1
    assert [y for y in mstore.transform_insns][0] == \
        mstore.domain_to_nodes[c2].insn
Example #26
def test_map_range_update():
    lp_opt = _dummy_opts()
    # test a complicated chaining / input map case

    # create dummy map
    c = arc.creator('c',
                    arc.kint_type, (10, ),
                    'C',
                    initializer=np.arange(3, 13, dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')

    # next add a creator that doesn't need a map
    c2 = arc.creator('c2',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.arange(10, 0, -1, dtype=arc.kint_type))
    mstore.check_and_add_transform(c2, c, 'i')

    # and a creator that only needs an affine map
    c3 = arc.creator('c3',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.arange(4, 14, dtype=arc.kint_type))
    mstore.check_and_add_transform(c3, c2, 'i')

    # and add a creator that will trigger a transform for c3
    c4 = arc.creator('c4',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.arange(4, 14, dtype=arc.kint_type))
    mstore.check_and_add_transform(c4, c3, 'i')

    # and another affine
    c5 = arc.creator('c5',
                     arc.kint_type, (10, ),
                     'C',
                     initializer=np.arange(3, 13, dtype=arc.kint_type))
    mstore.check_and_add_transform(c5, c4, 'i')
    # and we need a final variable to test c5
    x = __create_var('x')
    mstore.check_and_add_transform(x, c5, 'i')
    mstore.finalize()

    # there should be an affine input map of + 3
    assert (mstore.domain_to_nodes[c] == mstore.tree
            and mstore.tree.insn is None and mstore.tree.iname == 'i + 3'
            and mstore.tree.parent is not None)
    # c2 should be on the tree
    assert (mstore.domain_to_nodes[c2].parent == mstore.tree
            and mstore.domain_to_nodes[c2].insn
            == '<> i_1 = c2[i + 3] {id=index_i_1}')
    # c3 should be a regular transform off c2
    assert (mstore.domain_to_nodes[c3].parent == mstore.domain_to_nodes[c2]
            and mstore.domain_to_nodes[c3].insn
            == '<> i_2 = c3[i_1] {id=index_i_2}')
    # c4 should not have a transform (and thus should take the iname of c3)
    assert (mstore.domain_to_nodes[c4].parent == mstore.domain_to_nodes[c3]
            and mstore.domain_to_nodes[c4].insn is None
            and mstore.domain_to_nodes[c4].iname == 'i_2')
    # and c5 should be an affine of -1 off c4 (using c3's iname)
    assert (mstore.domain_to_nodes[c5].parent == mstore.domain_to_nodes[c4]
            and mstore.domain_to_nodes[c5].insn is None
            and mstore.domain_to_nodes[c5].iname == 'i_2 + -1')
Example #27
    def test_read_initial_conditions(self):
        setup = test_utils.get_read_ics_source()
        wrapper = OptionLoopWrapper.from_get_oploop(self, do_conp=True)
        for opts in wrapper:
            with temporary_build_dirs() as (build_dir, obj_dir, lib_dir):
                conp = wrapper.state['conp']

                # make a dummy generator
                insns = ("""
                        {spec} = {param} {{id=0}}
                    """)
                domain = arc.creator('domain',
                                     arc.kint_type, (10, ),
                                     'C',
                                     initializer=np.arange(
                                         10, dtype=arc.kint_type))
                mapstore = arc.MapStore(opts, domain, None)
                # create global args
                param = arc.creator(arc.pressure_array, np.float64,
                                    (arc.problem_size.name, 10), opts.order)
                spec = arc.creator(arc.state_vector, np.float64,
                                   (arc.problem_size.name, 10), opts.order)
                namestore = type('', (object, ), {'jac': ''})
                # create array / array strings
                param_lp, param_str = mapstore.apply_maps(param, 'j', 'i')
                spec_lp, spec_str = mapstore.apply_maps(spec, 'j', 'i')

                # create kernel infos
                info = knl_info('spec_eval',
                                insns.format(param=param_str, spec=spec_str),
                                mapstore,
                                kernel_data=[spec_lp, param_lp, arc.work_size],
                                silenced_warnings=['write_race(0)'])
                # create generators
                kgen = make_kernel_generator(
                    opts,
                    KernelType.dummy, [info],
                    namestore,
                    input_arrays=[param.name, spec.name],
                    output_arrays=[spec.name],
                    name='ric_tester')
                # make kernels
                kgen._make_kernels()
                # and generate RIC
                _, record, _ = kgen._generate_wrapping_kernel(build_dir)
                kgen._generate_common(build_dir, record)
                ric = os.path.join(
                    build_dir,
                    'read_initial_conditions' + utils.file_ext[opts.lang])

                # write header
                write_aux(build_dir, opts, self.store.specs, self.store.reacs)
                with open(os.path.join(build_dir, 'setup.py'), 'w') as file:
                    file.write(
                        setup.safe_substitute(buildpath=build_dir,
                                              obj_dir=obj_dir))

                # and compile
                from pyjac.libgen import compile, get_toolchain
                toolchain = get_toolchain(opts.lang)
                compile(opts.lang, toolchain, [ric], obj_dir=obj_dir)

                # write wrapper
                self.__write_with_subs('read_ic_wrapper.pyx',
                                       os.path.join(self.store.script_dir,
                                                    'test_utils'),
                                       build_dir,
                                       header_ext=utils.header_ext[opts.lang])
                # setup
                utils.run_with_our_python([
                    os.path.join(build_dir, 'setup.py'), 'build_ext',
                    '--build-lib', lib_dir
                ])

                infile = os.path.join(self.store.script_dir, 'test_utils',
                                      'ric_tester.py.in')
                outfile = os.path.join(lib_dir, 'ric_tester.py')
                # cogify: render ric_tester.py.in into a runnable script,
                # passing conp as a template definition (-D) and stripping the
                # generator code from the output (-d)
                try:
                    Cog().callableMain([
                        'cogapp', '-e', '-d', '-Dconp={}'.format(conp), '-o',
                        outfile, infile
                    ])
                except Exception:
                    import logging
                    logger = logging.getLogger(__name__)
                    logger.error('Error generating initial conditions reader:'
                                 ' {}'.format(outfile))
                    raise

                # save phi, param in correct order
                phi = (self.store.phi_cp if conp else self.store.phi_cv)
                savephi = phi.flatten(opts.order)
                param = self.store.P if conp else self.store.V
                savephi.tofile(os.path.join(lib_dir, 'phi_test.npy'))
                param.tofile(os.path.join(lib_dir, 'param_test.npy'))

                # save bin file
                out_file = np.concatenate(
                    (
                        np.reshape(phi[:, 0], (-1, 1)),  # temperature
                        np.reshape(param, (-1, 1)),      # param (P or V)
                        phi[:, 1:]),                     # species
                    axis=1
                )
                out_file = out_file.flatten('K')
                with open(os.path.join(lib_dir, 'data.bin'), 'wb') as file:
                    out_file.tofile(file)
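                # data.bin should therefore contain one row per initial
                # condition (T, then param (P or V), then the remaining state
                # entries), dumped as raw binary in memory ('K') order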

                # and run
                utils.run_with_our_python(
                    [outfile, opts.order,
                     str(self.store.test_size)])
Example #28
    def __get_call_kernel_generator(self, opts, spec_name='spec'):
        # create some test kernels

        # first, make a (potentially) host constant
        const = np.arange(10, dtype=arc.kint_type)
        const = lp.TemporaryVariable('const',
                                     shape=const.shape,
                                     initializer=const,
                                     read_only=True,
                                     address_space=scopes.GLOBAL)
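        # with an initializer and read_only=True this should be emitted as a
        # constant global array in the generated source; only the chem kernel
        # below lists it in its kernel_data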

        # and finally three kernels (one for each generator)
        # the chem kernel (id=2) reads the non-shared 'const' temporary
        jac_insns = ("""
                {jac} = {jac} + {spec} {{id=0}}
            """)
        spec_insns = ("""
                {spec} = {spec} + {chem} {{id=1}}
            """)
        chem_insns = ("""
                {chem} = const[i] {{id=2}}
            """)

        # create mapstore
        domain = arc.creator('domain',
                             arc.kint_type, (10, ),
                             'C',
                             initializer=np.arange(10, dtype=arc.kint_type))
        mapstore = arc.MapStore(opts, domain, None)
        # create global args
        jac = arc.creator('jac', np.float64, (arc.problem_size.name, 10),
                          opts.order)
        spec = arc.creator(spec_name, np.float64, (arc.problem_size.name, 10),
                           opts.order)
        chem = arc.creator('chem', np.float64, (arc.problem_size.name, 10),
                           opts.order)
        namestore = type('', (object, ), {
            'jac': jac,
            'spec': spec,
            'chem': chem
        })
        # create array / array strings
        jac_lp, jac_str = mapstore.apply_maps(jac, 'j', 'i')
        spec_lp, spec_str = mapstore.apply_maps(spec, 'j', 'i')
        chem_lp, chem_str = mapstore.apply_maps(chem, 'j', 'i')

        # create kernel infos
        jac_info = knl_info('jac_eval',
                            jac_insns.format(jac=jac_str, spec=spec_str),
                            mapstore,
                            kernel_data=[jac_lp, spec_lp, arc.work_size],
                            silenced_warnings=['write_race(0)'])
        spec_info = knl_info('spec_eval',
                             spec_insns.format(spec=spec_str, chem=chem_str),
                             mapstore,
                             kernel_data=[spec_lp, chem_lp, arc.work_size],
                             silenced_warnings=['write_race(1)'])
        chem_info = knl_info('chem_eval',
                             chem_insns.format(chem=chem_str),
                             mapstore,
                             kernel_data=[chem_lp, const, arc.work_size],
                             silenced_warnings=['write_race(2)'])

        # create generators
        chem_gen = make_kernel_generator(opts,
                                         KernelType.chem_utils, [chem_info],
                                         namestore,
                                         output_arrays=['chem'])
        spec_gen = make_kernel_generator(opts,
                                         KernelType.species_rates, [spec_info],
                                         namestore,
                                         depends_on=[chem_gen],
                                         input_arrays=['chem'],
                                         output_arrays=[spec_name])
        jac_gen = make_kernel_generator(opts,
                                        KernelType.jacobian, [jac_info],
                                        namestore,
                                        depends_on=[spec_gen],
                                        input_arrays=[spec_name],
                                        output_arrays=['jac'])
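        # the dependency chain is chem_utils -> species_rates -> jacobian, so
        # returning the jacobian generator is enough to reach all three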

        return jac_gen
Example #29
    def test_lockstep_driver(self):
        # get rate info
        rate_info = determine_jac_inds(self.store.reacs, self.store.specs,
                                       RateSpecialization.fixed)
        mod_test = get_run_source()

        for kind, loopy_opts in OptionLoopWrapper.from_get_oploop(
                self, do_ratespec=False, langs=get_test_langs(),
                do_vector=True, yield_index=True):

            # make namestore
            namestore = arc.NameStore(loopy_opts, rate_info)

            # kernel 1 - need the jacobian reset kernel
            reset = reset_arrays(loopy_opts, namestore)
            # kernel 2 - incrementer
            # make mapstore, arrays and kernel info
            mapstore = arc.MapStore(loopy_opts, namestore.phi_inds, None)

            # use arrays of 2 & 3 dimensions to test the driver's copying
            base_phi_shape = namestore.n_arr.shape
            P_lp, P_str = mapstore.apply_maps(namestore.P_arr,
                                              arc.global_ind)
            phi_lp, phi_str = mapstore.apply_maps(namestore.n_arr,
                                                  arc.global_ind,
                                                  arc.var_name)
            inputs = [P_lp.name, phi_lp.name]
            base_jac_shape = namestore.jac.shape
            jac_lp, jac_str = mapstore.apply_maps(namestore.jac,
                                                  arc.global_ind,
                                                  arc.var_name,
                                                  arc.var_name)
            outputs = [jac_lp.name]
            kernel_data = [P_lp, phi_lp, jac_lp]
            kernel_data.extend(arc.initial_condition_dimension_vars(
                loopy_opts, None))
            instructions = Template("""
                ${phi_str} = ${phi_str} + ${P_str} {id=0, dep=*}
                ${jac_str} = ${jac_str} + ${phi_str} {id=1, dep=0, nosync=0}
            """).safe_substitute(**locals())

            # handle atomicity
            can_vec, vec_spec = ic.get_deep_specializer(
                loopy_opts, atomic_ids=['1'])
            barriers = []
            if loopy_opts.depth:
                # need a barrier between the reset & the kernel
                barriers = [(0, 1, 'global')]
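                # presumably (from kernel index, to kernel index, scope): the
                # reset must finish before the increment touches the same
                # arrays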

            inner_kernel = k_gen.knl_info(
                name='inner',
                instructions=instructions,
                mapstore=mapstore,
                var_name=arc.var_name,
                kernel_data=kernel_data,
                silenced_warnings=['write_race(0)', 'write_race(1)'],
                can_vectorize=can_vec,
                vectorization_specializer=vec_spec)

            # put it in a generator
            generator = k_gen.make_kernel_generator(
                loopy_opts, kernel_type=KernelType.dummy,
                name='inner_kernel', kernels=[reset, inner_kernel],
                namestore=namestore,
                input_arrays=inputs[:],
                output_arrays=outputs[:],
                is_validation=True,
                driver_type=DriverType.lockstep,
                barriers=barriers)

            # use a "weird" (non-evenly divisibly by vector width) test-size to
            # properly test the copy-in / copy-out
            test_size = self.store.test_size - 37
            if test_size <= 0:
                test_size = self.store.test_size - 1
                assert test_size > 0
            # and make
            with temporary_build_dirs() as (build, obj, lib):

                numpy_arrays = []

                def __save(shape, name, zero=False):
                    data = np.zeros(shape)
                    if not zero:
                        # make it a simple range
                        data.flat[:] = np.arange(np.prod(shape))
                    # save
                    myname = pjoin(lib, name + '.npy')
                    # need to split inputs / answer
                    np.save(myname, data.flatten('K'))
                    numpy_arrays.append(data.flatten('K'))
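                # inputs get a simple arange fill so the expected result can be
                # recomputed below; output arrays start zeroed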

                # write 'data'
                import loopy as lp
                for arr in kernel_data:
                    if not isinstance(arr, lp.ValueArg):
                        __save((test_size,) + arr.shape[1:], arr.name,
                               arr.name in outputs)

                # and a parameter
                param = np.zeros((test_size,))
                param[:] = np.arange(test_size)

                # build code
                generator.generate(build,
                                   data_order=loopy_opts.order,
                                   data_filename='data.bin',
                                   for_validation=True)

                # write header
                write_aux(build, loopy_opts, self.store.specs, self.store.reacs)

                # generate wrapper
                pywrap(loopy_opts.lang, build,
                       obj_dir=obj, out_dir=lib,
                       ktype=KernelType.dummy,
                       file_base=generator.name,
                       additional_inputs=inputs[:],
                       additional_outputs=outputs[:])

                # and calling script
                test = pjoin(lib, 'test.py')

                inputs = utils.stringify_args(
                    [pjoin(lib, inp + '.npy') for inp in inputs], use_quotes=True)
                str_outputs = utils.stringify_args(
                    [pjoin(lib, inp + '.npy') for inp in outputs], use_quotes=True)

                num_threads = _get_test_input(
                    'num_threads', psutil.cpu_count(logical=False))
                with open(test, 'w') as file:
                    file.write(mod_test.safe_substitute(
                        package='pyjac_{lang}'.format(
                            lang=utils.package_lang[loopy_opts.lang]),
                        input_args=inputs,
                        test_arrays=str_outputs,
                        output_files=str_outputs,
                        looser_tols='[]',
                        loose_rtol=0,
                        loose_atol=0,
                        rtol=0,
                        atol=0,
                        non_array_args='{}, {}'.format(
                            test_size, num_threads),
                        kernel_name=generator.name.title(),))

                try:
                    utils.run_with_our_python([test])
                except subprocess.CalledProcessError:
                    logger = logging.getLogger(__name__)
                    logger.debug(utils.stringify_args(vars(loopy_opts), kwd=True))
                    assert False, 'lockstep_driver error'

                # calculate answers
                ns = base_jac_shape[1]
                # pressure is added to phi
                phi = numpy_arrays[1].reshape((test_size, ns),
                                              order=loopy_opts.order)
                p_arr = numpy_arrays[0]
                phi = phi + p_arr[:, np.newaxis]
                jac = numpy_arrays[2].reshape((test_size, ns, ns),
                                              order=loopy_opts.order)
                # and the diagonal of the jacobian has the updated phi added
                jac[:, range(ns), range(ns)] += phi[:, range(ns)]
                # and read in outputs
                test = np.load(pjoin(lib, outputs[0] + '.npy')).reshape(
                    jac.shape, order=loopy_opts.order)
                assert np.array_equal(test, jac)
Example #30
def test_chained_maps():
    lp_opt = _dummy_opts()
    c = arc.creator('base',
                    arc.kint_type, (5, ),
                    'C',
                    initializer=np.arange(5, dtype=arc.kint_type))

    mstore = arc.MapStore(lp_opt, c, True, 'i')
    assert len(mstore.transformed_domains) == 0

    def __get_iname(domain):
        return mstore.domain_to_nodes[domain].iname
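    # the MapStore assigns each mapped domain its own iname (i_0, i_1, ...);
    # __get_iname looks up whichever one was actually chosen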

    # add a variable
    var = arc.creator('var', arc.kint_type, (10, ), 'C')
    domain = arc.creator('domain',
                         arc.kint_type, (10, ),
                         'C',
                         initializer=np.arange(10, dtype=arc.kint_type))
    # this should work
    mstore.check_and_add_transform(var, domain, 'i')

    # now add a chained map
    var2 = arc.creator('var2', arc.kint_type, (10, ), 'C')
    domain2 = arc.creator('domain2',
                          arc.kint_type, (10, ),
                          'C',
                          initializer=np.arange(10, dtype=arc.kint_type))

    mstore.check_and_add_transform(domain2, domain)
    mstore.check_and_add_transform(var2, domain2)
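    # domain2's initializer is the same plain arange as domain's, so no extra
    # transform instruction should be needed for this link in the chain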

    # and finally put another chained map that does require a transform
    var3 = arc.creator('var3', arc.kint_type, (10, ), 'C')
    domain3 = arc.creator('domain3',
                          arc.kint_type, (10, ),
                          'C',
                          initializer=np.array(list(range(3)) +
                                               list(range(4, 11)),
                                               dtype=arc.kint_type))

    mstore.check_and_add_transform(domain3, domain2)
    mstore.check_and_add_transform(var3, domain3)
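    # domain3's initializer skips the value 3, so this link does require a
    # real map and its own iname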

    # now create variables and test
    var_lp, var_str = mstore.apply_maps(var, 'i')

    # test that the base map is there
    assert '<> {0} = domain[i] {{id=index_{0}}}'.format(__get_iname(domain)) in \
        mstore.transform_insns

    # var 1 should be based off domain's iname i_0
    assert var_str == 'var[{}]'.format(__get_iname(var))

    # var 2's iname should be based off domain2's iname;
    # however, since there is no map needed between domain and domain2,
    # this should _still_ be i_0
    var2_lp, var2_str = mstore.apply_maps(var2, 'i')

    assert var2_str == 'var2[{}]'.format(__get_iname(var2))

    # and var 3 should be based off domain 3's iname, i_3
    var3_lp, var3_str = mstore.apply_maps(var3, 'i')
    assert var3_str == 'var3[{}]'.format(__get_iname(var3))
    assert ('<> {0} = domain3[{1}] {{id=index_{0}}}'.format(
        __get_iname(var3), __get_iname(domain2)) in mstore.transform_insns)