        # NOTE(review): chunk opens mid-definition — this is the tail of a nested
        # `def conv1d(x, w)` helper whose header (and the bindings for i, d0, n0,
        # di, r, sum_limit) lies outside this view; indentation reconstructed from
        # the parallel conv2d pattern visible below — confirm against full file.
        i0 = i - d0
        # Symbolic 1-D convolution: for each output index i, sum over the kernel
        # tap r of x[i0 + di * r] @ w[di] (matmul mixes the channel dimension).
        return Lamda[i:n0](Sum[sum_limit((r, ), w, n0, di, i0)](x[i0 + di * r] @ w[di]))

    # Tail of the outer conv1d: when x carried leading batch dimensions
    # (presumably unpacked from x.shape earlier, as conv2d does below — TODO
    # confirm), map the single-sample helper over the batch axis symbolically.
    if batch_size:
        batch_size = batch_size[0]
        k = Symbol.k(integer=True)  # fresh integer index for the batch axis
        return Lamda[k:batch_size](conv1d(x[k], w))
    else:
        # No batch dimension: apply the helper to x directly.
        return conv1d(x, w)


# Register conv1d as a symbolic Function object: real-valued, exactly two
# arguments, with the Python def above as its evaluation rule and `shape`
# (defined elsewhere in this file — not visible here) as a computed property.
conv1d = Function.conv1d(real=True, nargs=(2, ), eval=conv1d, shape=property(shape))


def conv2d(x, w, *limits):
    """Symbolic 2-D convolution of input x with kernel w.

    Parameters
    ----------
    x : tensor expression with shape (*batch_size, n0, n1, in_channels)
        (the trailing three axes are unpacked below; any leading axes are
        treated as batch dimensions).
    w : kernel expression with shape (l0, l1, in_channels, out_channels).
    *limits : optional; if given, the first limit supplies the stride/dilation
        tuple r — NOTE(review): exact semantics of r depend on code past the
        end of this chunk; verify against the nested helper's body.
    """
    if limits:
        (r, ), *_ = limits
    else:
        r = (1, 1)  # default: unit step along both spatial axes
    l0, l1, in_channels, out_channels = w.shape
    *batch_size, n0, n1, _in_channels = x.shape
    # Kernel and input must agree on the channel axis.
    assert in_channels == _in_channels

    # NOTE(review): chunk ends here — the body of this nested single-sample
    # helper (mirroring the conv1d helper above) continues past this view.
    def conv2d(x, w):