Example #1
    def __init__(self,
                 incoming,
                 pool_size,
                 stride=None,
                 pad=(0, 0),
                 ignore_border=True,
                 mode='max',
                 **kwargs):
        super(Pool2DLayer, self).__init__(incoming, **kwargs)

        self.pool_size = as_tuple(pool_size, 2)

        if len(self.input_shape) != 4:
            raise ValueError("Tried to create a 2D pooling layer with "
                             "input shape %r. Expected 4 input dimensions "
                             "(batchsize, channels, 2 spatial dimensions)." %
                             (self.input_shape, ))

        if stride is None:
            self.stride = self.pool_size
        else:
            self.stride = as_tuple(stride, 2)

        self.pad = as_tuple(pad, 2)

        self.ignore_border = ignore_border
        self.mode = mode
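Every snippet in this collection funnels a scalar-or-iterable argument through an as_tuple helper. As a point of reference only, a minimal sketch of such a helper with Lasagne-style as_tuple(x, N, t=None) semantics (an assumption, not any project's actual implementation) could look like this:

    def as_tuple(x, N, t=None):
        # Sketch only: repeat a scalar N times, or accept an iterable of
        # length N, optionally checking that every element is of type t.
        try:
            X = tuple(x)
        except TypeError:
            X = (x,) * N
        if len(X) != N:
            raise ValueError("expected a single value or an iterable of "
                             "length %d, got %r" % (N, x))
        if t is not None and not all(isinstance(v, t) for v in X):
            raise TypeError("expected values of type %s, got %r" % (t, x))
        return X

Example #1 relies on exactly this behaviour: pool_size=3 becomes (3, 3), while pool_size=(3, 2) is passed through unchanged.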
Example #2
    def __init__(self, incoming, pool_size, stride=None, pad=0,
                 ignore_border=True, mode='max', **kwargs):
        super(PoolPerLine, self).__init__(incoming, **kwargs)

        self.pool_size = as_tuple(pool_size, 1)
        self.stride = self.pool_size if stride is None else as_tuple(stride, 1)
        self.pad = as_tuple(pad, 1)
        self.ignore_border = ignore_border
        self.mode = mode
Example #3
    def __init__(self,
                 incoming,
                 num_filters,
                 filter_size,
                 stride=1,
                 pad=0,
                 untie_biases=False,
                 W=init.GlorotUniform(),
                 b=init.Constant(0.),
                 nonlinearity=nonlinearities.rectify,
                 flip_filters=True,
                 n=None,
                 **kwargs):
        super(BaseConvLayer, self).__init__(incoming, **kwargs)
        if nonlinearity is None:
            self.nonlinearity = nonlinearities.identity
        else:
            self.nonlinearity = nonlinearity

        if n is None:
            n = len(self.input_shape) - 2
        elif n != len(self.input_shape) - 2:
            raise ValueError("Tried to create a %dD convolution layer with "
                             "input shape %r. Expected %d input dimensions "
                             "(batchsize, channels, %d spatial dimensions)." %
                             (n, self.input_shape, n + 2, n))
        self.n = n
        self.num_filters = num_filters
        self.filter_size = as_tuple(filter_size, n, int)
        self.flip_filters = flip_filters
        self.stride = as_tuple(stride, n, int)
        self.untie_biases = untie_biases

        if pad == 'same':
            if any(s % 2 == 0 for s in self.filter_size):
                raise NotImplementedError(
                    '`same` padding requires odd filter size.')
        if pad == 'valid':
            self.pad = as_tuple(0, n)
        elif pad in ('full', 'same'):
            self.pad = pad
        else:
            self.pad = as_tuple(pad, n, int)

        self.W = self.add_param(W, self.get_W_shape(), name="W")
        if b is None:
            self.b = None
        else:
            if self.untie_biases:
                biases_shape = (num_filters, ) + self.output_shape[2:]
            else:
                biases_shape = (num_filters, )
            self.b = self.add_param(b,
                                    biases_shape,
                                    name="b",
                                    regularizable=False)
Example #4
    def _model_fields_set(self):
        """
        Return a set containing the names of validated fields on the model.
        """
        model_fields = set(field.name for field in self.model._meta.fields)

        if self.fields:
            return model_fields & set(as_tuple(self.fields))

        return model_fields - set(as_tuple(self.exclude))
Example #5
    def _property_fields_set(self):
        """
        Returns a set containing the names of validated properties on the model.
        """
        property_fields = set(attr for attr in dir(self.model) if
                              isinstance(getattr(self.model, attr, None), property)
                              and not attr.startswith('_'))

        if self.fields:
            return property_fields & set(as_tuple(self.fields))

        return property_fields.union(set(as_tuple(self.include))) - set(as_tuple(self.exclude))
Example #6
 def __init__(self,
              incoming,
              num_filters,
              filter_size,
              stride=(1, 1),
              crop=0,
              untie_biases=False,
              W=init.GlorotUniform(),
              b=init.Constant(0.),
              nonlinearity=nonlinearities.rectify,
              flip_filters=False,
              output_size=None,
              **kwargs):
     # output_size must be set before calling the super constructor
     if (not isinstance(output_size, T.Variable)
             and output_size is not None):
         output_size = as_tuple(output_size, 2, int)
     self.output_size = output_size
     super(TransposedConv2DLayer, self).__init__(incoming,
                                                 num_filters,
                                                 filter_size,
                                                 stride,
                                                 crop,
                                                 untie_biases,
                                                 W,
                                                 b,
                                                 nonlinearity,
                                                 flip_filters,
                                                 n=2,
                                                 **kwargs)
     # rename self.pad to self.crop:
     self.crop = self.pad
     del self.pad
Example #7
    def _compute(self, part):
        fun = JITModule(self.kernel, self.it_space, *self.args, direct=self.is_direct)
        if not hasattr(self, '_jit_args'):
            self._jit_args = [0, 0]
            if isinstance(self._it_space._iterset, Subset):
                self._jit_args.append(self._it_space._iterset._indices)
            for arg in self.args:
                if arg._is_mat:
                    self._jit_args.append(arg.data.handle.handle)
                else:
                    for d in arg.data:
                        # Cannot access a property of the Dat or we will force
                        # evaluation of the trace
                        self._jit_args.append(d._data)

                if arg._is_indirect or arg._is_mat:
                    maps = as_tuple(arg.map, Map)
                    for map in maps:
                        for m in map:
                            self._jit_args.append(m.values_with_halo)

            for c in Const._definitions():
                self._jit_args.append(c.data)

            self._jit_args.extend(self.offset_args)

            self._jit_args.extend(self.layer_arg)

        if part.size > 0:
            self._jit_args[0] = part.offset
            self._jit_args[1] = part.offset + part.size
            fun(*self._jit_args)
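The PyOP2-flavoured snippets above and below call the helper differently, as as_tuple(obj, SomeClass), so that a lone Map and a tuple of Maps can be looped over uniformly. A rough sketch of that variant, stated as an assumption about its behaviour rather than as the project's code:

    def as_tuple(item, type=None, length=None):
        # Sketch only: wrap a single object in a 1-tuple, pass tuples/lists
        # through, then optionally validate the length and the element type.
        try:
            t = tuple(item)
        except TypeError:
            t = (item,) * (length or 1)
        if length is not None and len(t) != length:
            raise ValueError("expected %d values, got %r" % (length, item))
        if type is not None and not all(isinstance(i, type) for i in t):
            raise TypeError("expected items of type %s, got %r" % (type, item))
        return t

With this reading, as_tuple(arg.map, Map) in Example #7 yields a tuple of maps whether the argument was built from one map or a pair, which is what the nested loops there assume.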
Example #8
    def c_addto_vector_field(self):
        maps = as_tuple(self.map, Map)
        nrows = maps[0].arity
        ncols = maps[1].arity
        dims = self.data.sparsity.dims
        rmult = dims[0]
        cmult = dims[1]
        s = []
        for i in xrange(rmult):
            for j in xrange(cmult):
                idx = '[%d][%d]' % (i, j)
                val = "&%s%s" % (self.c_kernel_arg_name(), idx)
                row = "%(m)s * %(map)s[i * %(dim)s + i_0] + %(i)s" % \
                      {'m': rmult,
                       'map': self.c_map_name(),
                       'dim': nrows,
                       'i': i}
                col = "%(m)s * %(map)s2[i * %(dim)s + i_1] + %(j)s" % \
                      {'m': cmult,
                       'map': self.c_map_name(),
                       'dim': ncols,
                       'j': j}

                s.append('addto_scalar(%s, %s, %s, %s, %d)'
                         % (self.c_arg_name(), val, row, col, self.access == WRITE))
        return ';\n'.join(s)
Example #9
 def c_wrapper_dec(self):
     if self._is_mixed_mat:
         val = "Mat %(name)s = (Mat)((uintptr_t)PyLong_AsUnsignedLong(_%(name)s))" % \
             {"name": self.c_arg_name()}
         rows, cols = self._dat.sparsity.shape
         for i in range(rows):
             for j in range(cols):
                 val += ";\nMat %(iname)s; MatNestGetSubMat(%(name)s, %(i)d, %(j)d, &%(iname)s)" \
                     % {'name': self.c_arg_name(),
                        'iname': self.c_arg_name(i, j),
                        'i': i,
                        'j': j}
     elif self._is_mat:
         val = "Mat %s = (Mat)((uintptr_t)PyLong_AsUnsignedLong(_%s))" % \
             (self.c_arg_name(0, 0), self.c_arg_name())
     else:
         val = ';\n'.join(["%(type)s *%(name)s = (%(type)s *)(((PyArrayObject *)_%(name)s)->data)"
                          % {'name': self.c_arg_name(i), 'type': self.ctype}
                          for i, _ in enumerate(self.data)])
     if self._is_indirect or self._is_mat:
         for i, map in enumerate(as_tuple(self.map, Map)):
             for j in range(len(map)):
                 val += ";\nint *%(name)s = (int *)(((PyArrayObject *)_%(name)s)->data)" \
                     % {'name': self.c_map_name(i, j)}
     if self._is_vec_map:
         val += self.c_vec_dec()
     return val
Example #10
 def c_offset_init(self):
     maps = as_tuple(self.map, Map)
     val = []
     for i, map in enumerate(maps):
         for j, m in enumerate(map):
             val.append("PyObject *%s" % self.c_offset_name(i, j))
     return ", " + ", ".join(val)
Example #11
 def c_wrapper_arg(self):
     val = "PyObject *_%(name)s" % {'name': self.c_arg_name()}
     if self._is_indirect or self._is_mat:
         val += ", PyObject *_%(name)s" % {'name': self.c_map_name()}
         maps = as_tuple(self.map, Map)
         if len(maps) == 2:
             val += ", PyObject *_%(name)s" % {'name': self.c_map_name() + '2'}
     return val
Example #12
 def c_offset_decl(self):
     maps = as_tuple(self.map, Map)
     val = []
     for i, map in enumerate(maps):
         for j, _ in enumerate(map):
             val.append("int *_%(cnt)s = (int *)(((PyArrayObject *)%(cnt)s)->data)" %
                        {'cnt': self.c_offset_name(i, j)})
     return ";\n".join(val)
Example #13
 def c_map_decl(self):
     maps = as_tuple(self.map, Map)
     val = []
     for i, map in enumerate(maps):
         for j, m in enumerate(map):
             val.append("int xtr_%(name)s[%(dim)s];" %
                        {'name': self.c_map_name(i, j),
                         'dim': m.arity})
     return '\n'.join(val)+'\n'
Example #14
    def __init__(self, incoming, num_filters, filter_size, stride=1, pad=0,
                 untie_biases=False, W=init.Empty(), b=init.Empty(), 
                 nonlinearity=nonlinearities.rectify, flip_filters=True,
                 conv_dim=None, **kwargs):
        super().__init__(incoming, **kwargs)
        if nonlinearity is None:
            self.nonlinearity = nonlinearities.identity
        else:
            self.nonlinearity = nonlinearity

        if conv_dim is None:    # convolution dimension
            conv_dim = len(self.input_shape) - 2
        elif conv_dim != len(self.input_shape) - 2:
            raise ValueError("Tried to create a %dD convolution layer with "
                             "input shape %r. Expected %d input dimensions "
                             "(batch_size, channels, %d spatial dimensions)." %
                             (conv_dim, self.input_shape, conv_dim+2, conv_dim))
        self.conv_dim     = conv_dim       # convolution dimension
        self.num_filters  = num_filters
        self.filter_size  = utils.as_tuple(filter_size, conv_dim, int)
        self.flip_filters = flip_filters
        self.stride       = utils.as_tuple(stride, conv_dim, int)
        self.untie_biases = untie_biases

        if pad == 'same':
            if any(s % 2 == 0 for s in self.filter_size):
                raise NotImplementedError('`same` padding requires odd filter size.')
        if pad == 'valid':
            self.pad = utils.as_tuple(0, conv_dim)
        elif pad in ('full', 'same'):
            self.pad = pad
        else:
            self.pad = utils.as_tuple(pad, conv_dim, int)

        self.W = self.add_param(W, self.get_W_shape(), name='W')
        # self.W = self.register_parameter(W, self.get_W_shape(), name='W')
        if b is None:
            self.b = None
        else:
            if self.untie_biases:
                biases_shape = (num_filters,) + self.output_shape[2:]
            else:
                biases_shape = (num_filters,)
            self.b = self.add_param(b, biases_shape, name='b')
Example #15
 def c_map_decl_itspace(self):
     cdim = np.prod(self.data.cdim)
     maps = as_tuple(self.map, Map)
     val = []
     for i, map in enumerate(maps):
         for j, m in enumerate(map):
             val.append("int xtr_%(name)s[%(dim_row)s];\n" %
                        {'name': self.c_map_name(i, j),
                         'dim_row': str(m.arity * cdim) if self._flatten else str(m.arity)})
     return '\n'.join(val)+'\n'
Example #16
 def c_add_offset_map(self):
     maps = as_tuple(self.map, Map)
     val = []
     for i, map in enumerate(maps):
         for j, m in enumerate(map):
             for idx in range(m.arity):
                 val.append("xtr_%(name)s[%(ind)s] += _%(off)s[%(ind)s];" %
                            {'name': self.c_map_name(i, j),
                             'off': self.c_offset_name(i, j),
                             'ind': idx})
     return '\n'.join(val)+'\n'
Example #17
 def c_map_init(self):
     maps = as_tuple(self.map, Map)
     val = []
     for i, map in enumerate(maps):
         for j, m in enumerate(map):
             for idx in range(m.arity):
                 val.append("xtr_%(name)s[%(ind)s] = *(%(name)s + i * %(dim)s + %(ind)s);" %
                            {'name': self.c_map_name(i, j),
                             'dim': m.arity,
                             'ind': idx})
     return '\n'.join(val)+'\n'
Example #18
 def c_wrapper_arg(self):
     if self._is_mat:
         val = "PyObject *_%s" % self.c_arg_name()
     else:
         val = ', '.join(["PyObject *_%s" % self.c_arg_name(i)
                          for i in range(len(self.data))])
     if self._is_indirect or self._is_mat:
         for i, map in enumerate(as_tuple(self.map, Map)):
             for j, m in enumerate(map):
                 val += ", PyObject *_%s" % (self.c_map_name(i, j))
     return val
Example #19
File: host.py Project: chromy/PyOP2
 def c_offset_init(self):
     maps = as_tuple(self.map, Map)
     val = []
     for i, map in enumerate(maps):
         if not map.iterset._extruded:
             continue
         for j, m in enumerate(map):
             val.append("int *%s" % self.c_offset_name(i, j))
     if len(val) == 0:
         return ""
     return ", " + ", ".join(val)
Example #20
    def c_addto_scalar_field(self):
        maps = as_tuple(self.map, Map)
        nrows = maps[0].arity
        ncols = maps[1].arity

        return 'addto_vector(%(mat)s, %(vals)s, %(nrows)s, %(rows)s, %(ncols)s, %(cols)s, %(insert)d)' % \
            {'mat': self.c_arg_name(),
             'vals': self.c_kernel_arg_name(),
             'nrows': nrows,
             'ncols': ncols,
             'rows': "%s + i * %s" % (self.c_map_name(), nrows),
             'cols': "%s2 + i * %s" % (self.c_map_name(), ncols),
             'insert': self.access == WRITE}
Example #21
    def __init__(self, incoming, scale_factor, mode='repeat', **kwargs):
        super(Upscale2DLayer, self).__init__(incoming, **kwargs)

        self.scale_factor = as_tuple(scale_factor, 2)

        if self.scale_factor[0] < 1 or self.scale_factor[1] < 1:
            raise ValueError('Scale factor must be >= 1, not {0}'.format(
                self.scale_factor))

        if mode not in {'repeat', 'dilate'}:
            msg = "Mode must be either 'repeat' or 'dilate', not {0}"
            raise ValueError(msg.format(mode))
        self.mode = mode
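As a usage illustration of why these constructors normalise their arguments (hypothetical shapes, assuming this is Lasagne's Upscale2DLayer):

    from lasagne.layers import Upscale2DLayer

    # A single int and a pair are both accepted; as_tuple stores a 2-tuple.
    up_a = Upscale2DLayer((None, 3, 16, 16), scale_factor=2)       # -> (2, 2)
    up_b = Upscale2DLayer((None, 3, 16, 16), scale_factor=(2, 3))  # -> (2, 3)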
Example #22
File: host.py Project: chromy/PyOP2
    def c_map_bcs(self, sign):
        maps = as_tuple(self.map, Map)
        val = []
        # To throw away boundary condition values, we subtract a large
        # value from the map to make it negative then add it on later to
        # get back to the original
        max_int = 10000000

        need_bottom = False
        # Apply any bcs on the first (bottom) layer
        for i, map in enumerate(maps):
            if not map.iterset._extruded:
                continue
            for j, m in enumerate(map):
                if 'bottom' not in m.implicit_bcs:
                    continue
                need_bottom = True
                for idx in range(m.arity):
                    if m.bottom_mask[idx] < 0:
                        val.append("xtr_%(name)s[%(ind)s] %(sign)s= %(val)s;" %
                                   {'name': self.c_map_name(i, j),
                                    'val': max_int,
                                    'ind': idx,
                                    'sign': sign})
        if need_bottom:
            val.insert(0, "if (j_0 == 0) {")
            val.append("}")

        need_top = False
        pos = len(val)
        # Apply any bcs on last (top) layer
        for i, map in enumerate(maps):
            if not map.iterset._extruded:
                continue
            for j, m in enumerate(map):
                if 'top' not in m.implicit_bcs:
                    continue
                need_top = True
                for idx in range(m.arity):
                    if m.top_mask[idx] < 0:
                        val.append("xtr_%(name)s[%(ind)s] %(sign)s= %(val)s;" %
                                   {'name': self.c_map_name(i, j),
                                    'val': max_int,
                                    'ind': idx,
                                    'sign': sign})
        if need_top:
            val.insert(pos, "if (j_0 == end_layer - 1) {")
            val.append("}")
        return '\n'.join(val)+'\n'
Example #23
 def c_add_offset_map_flatten(self):
     cdim = np.prod(self.data.cdim)
     maps = as_tuple(self.map, Map)
     val = []
     for i, map in enumerate(maps):
         for j, m in enumerate(map):
             for idx in range(m.arity):
                 for k in range(cdim):
                     val.append("xtr_%(name)s[%(ind_flat)s] += _%(off)s[%(ind)s] * %(dim)s;" %
                                {'name': self.c_map_name(i, j),
                                 'off': self.c_offset_name(i, j),
                                 'ind': idx,
                                 'ind_flat': str(m.arity * k + idx),
                                 'dim': str(cdim)})
     return '\n'.join(val)+'\n'
Example #24
 def c_map_init_flattened(self):
     cdim = np.prod(self.data.cdim)
     maps = as_tuple(self.map, Map)
     val = []
     for i, map in enumerate(maps):
         for j, m in enumerate(map):
             for idx in range(m.arity):
                 for k in range(cdim):
                     val.append("xtr_%(name)s[%(ind_flat)s] = %(dat_dim)s * (*(%(name)s + i * %(dim)s + %(ind)s))%(offset)s;" %
                                {'name': self.c_map_name(i, j),
                                 'dim': m.arity,
                                 'ind': idx,
                                 'dat_dim': str(cdim),
                                 'ind_flat': str(m.arity * k + idx),
                                 'offset': ' + '+str(k) if k > 0 else ''})
     return '\n'.join(val)+'\n'
Example #25
File: host.py Project: chromy/PyOP2
 def c_map_decl(self, is_facet=False):
     if self._is_mat:
         dsets = self.data.sparsity.dsets
     else:
         dsets = (self.data.dataset,)
     val = []
     for i, (map, dset) in enumerate(zip(as_tuple(self.map, Map), dsets)):
         for j, (m, d) in enumerate(zip(map, dset)):
             dim = m.arity
             if self._is_dat and self._flatten:
                 dim *= d.cdim
             if is_facet:
                 dim *= 2
             val.append("int xtr_%(name)s[%(dim)s];" %
                        {'name': self.c_map_name(i, j), 'dim': dim})
     return '\n'.join(val)+'\n'
Example #26
File: host.py Project: chromy/PyOP2
 def c_map_init(self, is_top=False, layers=1, is_facet=False):
     if self._is_mat:
         dsets = self.data.sparsity.dsets
     else:
         dsets = (self.data.dataset,)
     val = []
     for i, (map, dset) in enumerate(zip(as_tuple(self.map, Map), dsets)):
         for j, (m, d) in enumerate(zip(map, dset)):
             for idx in range(m.arity):
                 if self._is_dat and self._flatten and d.cdim > 1:
                     for k in range(d.cdim):
                         val.append("xtr_%(name)s[%(ind_flat)s] = %(dat_dim)s * (*(%(name)s + i * %(dim)s + %(ind)s)%(off_top)s)%(offset)s;" %
                                    {'name': self.c_map_name(i, j),
                                     'dim': m.arity,
                                     'ind': idx,
                                     'dat_dim': d.cdim,
                                     'ind_flat': m.arity * k + idx,
                                     'offset': ' + '+str(k) if k > 0 else '',
                                     'off_top': ' + start_layer * '+str(m.offset[idx]) if is_top else ''})
                 else:
                     val.append("xtr_%(name)s[%(ind)s] = *(%(name)s + i * %(dim)s + %(ind)s)%(off_top)s;" %
                                {'name': self.c_map_name(i, j),
                                 'dim': m.arity,
                                 'ind': idx,
                                 'off_top': ' + start_layer * '+str(m.offset[idx]) if is_top else ''})
             if is_facet:
                 for idx in range(m.arity):
                     if self._is_dat and self._flatten and d.cdim > 1:
                         for k in range(d.cdim):
                             val.append("xtr_%(name)s[%(ind_flat)s] = %(dat_dim)s * (*(%(name)s + i * %(dim)s + %(ind)s)%(off)s)%(offset)s;" %
                                        {'name': self.c_map_name(i, j),
                                         'dim': m.arity,
                                         'ind': idx,
                                         'dat_dim': d.cdim,
                                         'ind_flat': m.arity * (k + d.cdim) + idx,
                                         'offset': ' + '+str(k) if k > 0 else '',
                                         'off': ' + ' + str(m.offset[idx])})
                     else:
                         val.append("xtr_%(name)s[%(ind)s] = *(%(name)s + i * %(dim)s + %(ind_zero)s)%(off_top)s%(off)s;" %
                                    {'name': self.c_map_name(i, j),
                                     'dim': m.arity,
                                     'ind': idx + m.arity,
                                     'ind_zero': idx,
                                     'off_top': ' + start_layer' if is_top else '',
                                     'off': ' + ' + str(m.offset[idx])})
     return '\n'.join(val)+'\n'
Example #27
    def compute(self):
        fun = JITModule(self.kernel, self.it_space.extents, *self.args)
        _args = [0, 0]          # start, stop
        for arg in self.args:
            if arg._is_mat:
                _args.append(arg.data.handle.handle)
            else:
                _args.append(arg.data._data)

            if arg._is_dat:
                maybe_setflags(arg.data._data, write=False)

            if arg._is_indirect or arg._is_mat:
                maps = as_tuple(arg.map, Map)
                for map in maps:
                    _args.append(map.values)

        for c in Const._definitions():
            _args.append(c.data)

        # kick off halo exchanges
        self.halo_exchange_begin()
        # compute over core set elements
        _args[0] = 0
        _args[1] = self.it_space.core_size
        fun(*_args)
        # wait for halo exchanges to complete
        self.halo_exchange_end()
        # compute over remaining owned set elements
        _args[0] = self.it_space.core_size
        _args[1] = self.it_space.size
        fun(*_args)
        # By splitting the reduction here we get two advantages:
        # - we don't double count contributions in halo elements
        # - once our MPI supports the asynchronous collectives in
        #   MPI-3, we can do more comp/comms overlap
        self.reduction_begin()
        if self.needs_exec_halo:
            _args[0] = self.it_space.size
            _args[1] = self.it_space.exec_size
            fun(*_args)
        self.reduction_end()
        self.maybe_set_halo_update_needed()
        for arg in self.args:
            if arg._is_mat:
                arg.data._assemble()
Example #28
    def c_addto_vector_field(self, i, j, xtr=""):
        maps = as_tuple(self.map, Map)
        nrows = maps[0].split[i].arity
        ncols = maps[1].split[j].arity
        rmult, cmult = self.data.sparsity[i, j].dims
        s = []
        if self._flatten:
            idx = '[i_0][i_1]'
            val = "&%s%s" % ("buffer_" + self.c_arg_name(), idx)
            row = "%(m)s * %(xtr)s%(map)s[%(elem_idx)si_0 %% %(dim)s] + (i_0 / %(dim)s)" % \
                  {'m': rmult,
                   'map': self.c_map_name(0, i),
                   'dim': nrows,
                   'elem_idx': "i * %d +" % (nrows) if xtr == "" else "",
                   'xtr': xtr}
            col = "%(m)s * %(xtr)s%(map)s[%(elem_idx)si_1 %% %(dim)s] + (i_1 / %(dim)s)" % \
                  {'m': cmult,
                   'map': self.c_map_name(1, j),
                   'dim': ncols,
                   'elem_idx': "i * %d +" % (ncols) if xtr == "" else "",
                   'xtr': xtr}
            return 'addto_scalar(%s, %s, %s, %s, %d)' \
                % (self.c_arg_name(i, j), val, row, col, self.access == WRITE)
        for r in xrange(rmult):
            for c in xrange(cmult):
                idx = '[i_0 + %d][i_1 + %d]' % (r, c)
                val = "&%s%s" % ("buffer_" + self.c_arg_name(), idx)
                row = "%(m)s * %(xtr)s%(map)s[%(elem_idx)si_0] + %(r)s" % \
                      {'m': rmult,
                       'map': self.c_map_name(0, i),
                       'dim': nrows,
                       'r': r,
                       'elem_idx': "i * %d +" % (nrows) if xtr == "" else "",
                       'xtr': xtr}
                col = "%(m)s * %(xtr)s%(map)s[%(elem_idx)si_1] + %(c)s" % \
                      {'m': cmult,
                       'map': self.c_map_name(1, j),
                       'dim': ncols,
                       'c': c,
                       'elem_idx': "i * %d +" % (ncols) if xtr == "" else "",
                       'xtr': xtr}

                s.append('addto_scalar(%s, %s, %s, %s, %d)'
                         % (self.c_arg_name(i, j), val, row, col, self.access == WRITE))
        return ';\n'.join(s)
Example #29
File: host.py Project: chromy/PyOP2
    def c_addto_scalar_field(self, i, j, buf_name, extruded=None, is_facet=False):
        maps = as_tuple(self.map, Map)
        nrows = maps[0].split[i].arity
        ncols = maps[1].split[j].arity
        rows_str = "%s + i * %s" % (self.c_map_name(0, i), nrows)
        cols_str = "%s + i * %s" % (self.c_map_name(1, j), ncols)

        if extruded is not None:
            rows_str = extruded + self.c_map_name(0, i)
            cols_str = extruded + self.c_map_name(1, j)

        return 'addto_vector(%(mat)s, %(vals)s, %(nrows)s, %(rows)s, %(ncols)s, %(cols)s, %(insert)d)' % \
            {'mat': self.c_arg_name(i, j),
             'vals': buf_name,
             'nrows': nrows * (2 if is_facet else 1),
             'ncols': ncols * (2 if is_facet else 1),
             'rows': rows_str,
             'cols': cols_str,
             'insert': self.access == WRITE}
Example #30
    def _parse(self, stream, content_type):
        """
        Parse the request content.

        May raise a 415 ErrorResponse (Unsupported Media Type), or a 400 ErrorResponse (Bad Request).
        """
        if stream is None or content_type is None:
            return (None, None)

        parsers = as_tuple(self.parsers)

        for parser_cls in parsers:
            parser = parser_cls(self)
            if parser.can_handle_request(content_type):
                return parser.parse(stream)

        raise ErrorResponse(status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
                            {'error': 'Unsupported media type in request \'%s\'.' % 
                            content_type})
Example #31
File: host.py Project: chromy/PyOP2
 def c_add_offset_map(self, is_facet=False):
     if self._is_mat:
         dsets = self.data.sparsity.dsets
     else:
         dsets = (self.data.dataset,)
     val = []
     for i, (map, dset) in enumerate(zip(as_tuple(self.map, Map), dsets)):
         if not map.iterset._extruded:
             continue
         for j, (m, d) in enumerate(zip(map, dset)):
             for idx in range(m.arity):
                 if self._is_dat and self._flatten and d.cdim > 1:
                     for k in range(d.cdim):
                         val.append("xtr_%(name)s[%(ind_flat)s] += %(off)s[%(ind)s] * %(dim)s;" %
                                    {'name': self.c_map_name(i, j),
                                     'off': self.c_offset_name(i, j),
                                     'ind': idx,
                                     'ind_flat': m.arity * k + idx,
                                     'dim': d.cdim})
                 else:
                     val.append("xtr_%(name)s[%(ind)s] += %(off)s[%(ind)s];" %
                                {'name': self.c_map_name(i, j),
                                 'off': self.c_offset_name(i, j),
                                 'ind': idx})
             if is_facet:
                 for idx in range(m.arity):
                     if self._is_dat and self._flatten and d.cdim > 1:
                         for k in range(d.cdim):
                             val.append("xtr_%(name)s[%(ind_flat)s] += %(off)s[%(ind)s] * %(dim)s;" %
                                        {'name': self.c_map_name(i, j),
                                         'off': self.c_offset_name(i, j),
                                         'ind': idx,
                                         'ind_flat': m.arity * (k + d.cdim) + idx,
                                         'dim': d.cdim})
                     else:
                         val.append("xtr_%(name)s[%(ind)s] += %(off)s[%(ind_zero)s];" %
                                    {'name': self.c_map_name(i, j),
                                     'off': self.c_offset_name(i, j),
                                     'ind': m.arity + idx,
                                     'ind_zero': idx})
     return '\n'.join(val)+'\n'
Example #32
 def __init__(self,
              incoming,
              num_filters,
              filter_size,
              dilation=(1, 1),
              pad=0,
              untie_biases=False,
              W=init.GlorotUniform(),
              b=init.Constant(0.),
              nonlinearity=nonlinearities.rectify,
              flip_filters=False,
              **kwargs):
     self.dilation = as_tuple(dilation, 2, int)
     super(DilatedConv2DLayer, self).__init__(incoming,
                                              num_filters,
                                              filter_size,
                                              1,
                                              pad,
                                              untie_biases,
                                              W,
                                              b,
                                              nonlinearity,
                                              flip_filters,
                                              n=2,
                                              **kwargs)
     # remove self.stride:
     del self.stride
     # require valid convolution
     if self.pad != (0, 0):
         raise NotImplementedError(
             "DilatedConv2DLayer requires pad=0 / (0,0) / 'valid', but "
             "got %r. For a padded dilated convolution, add a PadLayer." %
             (pad, ))
     # require unflipped filters
     if self.flip_filters:
         raise NotImplementedError(
             "DilatedConv2DLayer requires flip_filters=False.")
Example #33
    def _compute(self, part):
        fun = JITModule(self.kernel, self.it_space, *self.args)
        if not hasattr(self, '_jit_args'):
            self._jit_args = [0, 0]
            for arg in self.args:
                if arg._is_mat:
                    self._jit_args.append(arg.data.handle.handle)
                else:
                    self._jit_args.append(arg.data._data)

                if arg._is_indirect or arg._is_mat:
                    maps = as_tuple(arg.map, Map)
                    for map in maps:
                        self._jit_args.append(map.values)

            for c in Const._definitions():
                self._jit_args.append(c.data)

            self._jit_args.extend(self.offset_args())

        if part.size > 0:
            self._jit_args[0] = part.offset
            self._jit_args[1] = part.offset + part.size
            fun(*self._jit_args)
Example #34
    def c_addto_scalar_field(self, i, j, offsets, extruded=None):
        maps = as_tuple(self.map, Map)
        nrows = maps[0].split[i].arity
        ncols = maps[1].split[j].arity
        rows_str = "%s + i * %s" % (self.c_map_name(0, i), nrows)
        cols_str = "%s + i * %s" % (self.c_map_name(1, j), ncols)

        if extruded is not None:
            rows_str = extruded + self.c_map_name(0, i)
            cols_str = extruded + self.c_map_name(1, j)

        if self._is_mat and self._is_mixed:
            vals = 'scatter_buffer_' + self.c_arg_name(i, j)
        else:
            vals = '&buffer_' + self.c_arg_name() + "".join(["[%d]" % d for d in offsets])

        return 'addto_vector(%(mat)s, %(vals)s, %(nrows)s, %(rows)s, %(ncols)s, %(cols)s, %(insert)d)' % \
            {'mat': self.c_arg_name(i, j),
             'vals': vals,
             'nrows': nrows,
             'ncols': ncols,
             'rows': rows_str,
             'cols': cols_str,
             'insert': self.access == WRITE}
Example #35
    def c_map_bcs(self, top_bottom, layers, sign):
        maps = as_tuple(self.map, Map)
        val = []
        if top_bottom is None:
            return ""

        # To throw away boundary condition values, we subtract a large
        # value from the map to make it negative then add it on later to
        # get back to the original
        max_int = 10000000
        if top_bottom[0]:
            # We need to apply the bottom bcs
            val.append("if (j_0 == 0){")
            for i, map in enumerate(maps):
                for j, m in enumerate(map):
                    for idx in range(m.arity):
                        val.append("xtr_%(name)s[%(ind)s] %(sign)s= %(val)s;" %
                                   {'name': self.c_map_name(i, j),
                                    'val': max_int if m.bottom_mask[idx] < 0 else 0,
                                    'ind': idx,
                                    'sign': sign})
            val.append("}")

        if top_bottom[1]:
            # We need to apply the top bcs
            val.append("if (j_0 == layer-2){")
            for i, map in enumerate(maps):
                for j, m in enumerate(map):
                    for idx in range(m.arity):
                        val.append("xtr_%(name)s[%(ind)s] %(sign)s= %(val)s;" %
                                   {'name': self.c_map_name(i, j),
                                    'val': max_int if m.top_mask[idx] < 0 else 0,
                                    'ind': idx,
                                    'sign': sign})
            val.append("}")
        return '\n'.join(val)+'\n'
Example #36
    def _compute(self, part):
        fun = JITModule(self.kernel, self.it_space, *self.args, direct=self.is_direct, iterate=self.iteration_region)
        if not hasattr(self, '_jit_args'):
            self._argtypes = [ctypes.c_int, ctypes.c_int]
            self._jit_args = [0, 0]
            if isinstance(self._it_space._iterset, Subset):
                self._argtypes.append(self._it_space._iterset._argtype)
                self._jit_args.append(self._it_space._iterset._indices)
            for arg in self.args:
                if arg._is_mat:
                    self._argtypes.append(arg.data._argtype)
                    self._jit_args.append(arg.data.handle.handle)
                else:
                    for d in arg.data:
                        # Cannot access a property of the Dat or we will force
                        # evaluation of the trace
                        self._argtypes.append(d._argtype)
                        self._jit_args.append(d._data)

                if arg._is_indirect or arg._is_mat:
                    maps = as_tuple(arg.map, Map)
                    for map in maps:
                        for m in map:
                            self._argtypes.append(m._argtype)
                            self._jit_args.append(m.values_with_halo)

            for c in Const._definitions():
                self._argtypes.append(c._argtype)
                self._jit_args.append(c.data)

            for a in self.offset_args:
                self._argtypes.append(ndpointer(a.dtype, shape=a.shape))
                self._jit_args.append(a)

            if self.iteration_region in [ON_BOTTOM]:
                self._argtypes.append(ctypes.c_int)
                self._argtypes.append(ctypes.c_int)
                self._jit_args.append(0)
                self._jit_args.append(1)
            elif self.iteration_region in [ON_TOP]:
                self._argtypes.append(ctypes.c_int)
                self._argtypes.append(ctypes.c_int)
                self._jit_args.append(self._it_space.layers - 2)
                self._jit_args.append(self._it_space.layers - 1)
            elif self.iteration_region in [ON_INTERIOR_FACETS]:
                self._argtypes.append(ctypes.c_int)
                self._argtypes.append(ctypes.c_int)
                self._jit_args.append(0)
                self._jit_args.append(self._it_space.layers - 2)
            elif self._it_space._extruded:
                self._argtypes.append(ctypes.c_int)
                self._argtypes.append(ctypes.c_int)
                self._jit_args.append(0)
                self._jit_args.append(self._it_space.layers - 1)

        self._jit_args[0] = part.offset
        self._jit_args[1] = part.offset + part.size
        # Must call fun on all processes since this may trigger
        # compilation.
        with timed_region("ParLoop kernel"):
            fun(*self._jit_args, argtypes=self._argtypes, restype=None)