def instanceNorm2d(data, scale, bias, epsilon=1e-5):
    """Instance normalization for 4d data, built on top of MIOpen batch norm.

    The batch dimension is folded into the channel dimension so that every
    (sample, map) pair is normalized independently by a single batchNorm2d
    call. scale/bias are tiled across the batch to line up with the folded
    channels.

    Returns (outdata, savemean, saveinvvar, extscale); extscale is the tiled
    scale tensor, which the matching backward pass takes as input.
    """
    n = data.shape[0]

    # Replicate per-map scale/bias for every sample in the batch.
    if n > 1:
        fullscale = Utils.tile(scale, n, axis=1)
        fullbias = Utils.tile(bias, n, axis=1)
    else:
        fullscale, fullbias = scale, bias

    folded = data.reshape(1, n * data.shape[1], data.shape[2], data.shape[3])

    statshape = (1, folded.shape[1], 1, 1)
    mean = Driver.empty(queue, statshape, dtype=np.float32, allocator=memPool)
    var = Driver.empty(queue, statshape, dtype=np.float32, allocator=memPool)

    outdata, savemean, saveinvvar = MIOpen.batchNorm2d(
        folded, fullscale, fullbias, mean, var, epsilon, test=False
    )

    return outdata.reshape(data.shape), savemean, saveinvvar, fullscale
def instanceNorm2dBackward(grad, data, extscale, savemean, saveinvvar, epsilon, affine=True):
    """Backward pass matching instanceNorm2d.

    Folds the batch dimension into the channel dimension (mirroring the
    forward pass) and delegates to MIOpen.batchNorm2dBackward. The parameter
    gradients come back per (sample, map), so for batches larger than one
    they are summed over the batch to recover per-map gradients.

    Returns (ingrad, scalegrad, bgrad) when affine, otherwise just ingrad.
    """
    n, maps = grad.shape[:2]

    foldedgrad = grad.reshape(1, n * grad.shape[1], grad.shape[2], grad.shape[3])
    foldeddata = data.reshape(1, n * data.shape[1], data.shape[2], data.shape[3])

    ingrad, scalegrad, bgrad = MIOpen.batchNorm2dBackward(
        foldeddata, foldedgrad, extscale, savemean, saveinvvar, epsilon
    )

    if affine and n > 1:
        # Reduce the (batch, maps) gradient matrices over the batch axis.
        scalegrad = CLBlas.sumOnMatrix(scalegrad.reshape(n, -1)).reshape(1, maps, 1, 1)
        bgrad = CLBlas.sumOnMatrix(bgrad.reshape(n, -1)).reshape(1, maps, 1, 1)

    ingrad = ingrad.reshape(grad.shape)
    return (ingrad, scalegrad, bgrad) if affine else ingrad
def wrapBatchNormNdBackward(data, grad, scale, savemean, saveinvvar, epsilon, mode=None):
    """Dispatch Nd batch norm backward to the 2d MIOpen kernel.

    The mode argument exists for interface compatibility and is ignored here.
    """
    return MIOpen.batchNorm2dBackward(data, grad, scale, savemean, saveinvvar, epsilon)
def wrapDeconvNd(data, W, bias, stride, pad, dilation, groups, algo):
    """Transposed 2d convolution via MIOpen.

    This backend path only handles unit dilation and a single group.
    """
    assert dilation == (1, 1)
    assert groups == 1

    return MIOpen.conv2d(data, W, bias, stride, pad, mode=MIOpen.ConvMode.transpose, algo=algo)
def wrapConvNdbenchmark(datashape, Wshape, stride, pad, dilation, groups, transpose):
    """Benchmark 2d convolution algorithms for the given shapes.

    Only unit dilation and a single group are supported; transpose selects
    the transposed-convolution mode.
    """
    assert dilation == (1, 1)
    assert groups == 1

    convmode = MIOpen.ConvMode.transpose if transpose else MIOpen.ConvMode.conv
    return MIOpen.conv2dbenchmark(datashape, Wshape, stride, pad, mode=convmode)
def wrapDeconvNdBackwardData(grad, W, data, stride, pad, dilation, groups, algo):
    """Data gradient of the transposed 2d convolution via MIOpen.

    Only unit dilation and a single group are supported.
    """
    assert dilation == (1, 1)
    assert groups == 1

    return MIOpen.conv2dBackwardData(
        grad, W, data, stride, pad, mode=MIOpen.ConvMode.transpose, algo=algo
    )
def wrapConvNdBackwardParams(data, grad, W, bias, stride, pad, dilation, groups, wgrad, bgrad,
                             scale, momentum, algo):
    """Weight/bias gradients of the 2d convolution via MIOpen.

    Only unit dilation and a single group are supported; dilation and groups
    are accepted for interface compatibility.
    """
    assert dilation == (1, 1)
    assert groups == 1

    return MIOpen.conv2dBackwardParams(
        data, grad, W, bias, stride, pad, wgrad, bgrad, scale, momentum, algo=algo
    )
def timeConv(datashape, Wshape, stride, pad):
    """Benchmark conv2d algorithms for the given shapes and print a timing table.

    Runs MIOpen.conv2dbenchmark once and prints, for each of the forward,
    backward-filter and backward-data passes, every available algorithm with
    its runtime (seconds) and workspace footprint (mebibytes).
    """
    fwdResults, bwdFilterResults, bwdDataResults = MIOpen.conv2dbenchmark(
        datashape, Wshape, stride, pad
    )

    formatstr = "%-40s %-25s %-28s"

    # The original code repeated this print loop three times verbatim;
    # factor it into one helper so all sections stay formatted identically.
    def printSection(header, results, algotype):
        # One table section: header line, then one row per benchmarked algorithm.
        print(header)
        for res in results:
            print(formatstr % (
                "Algo %s" % algotype(res.algo),
                "time %.6f secs" % res.time,
                "memory %.6f mbytes" % (res.memory / 1024**2)
            ))

    printSection("Forward results:", fwdResults, MIOpen.ConvFwdAlgo)
    printSection("\nBackward filter results:", bwdFilterResults, MIOpen.ConvBwdFilterAlgo)
    printSection("\nBackward data results:", bwdDataResults, MIOpen.ConvBwdDataAlgo)
def wrapBatchNormNd(data, scale, bias, mean, var, epsilon, factor, test, mode=None, out=None):
    """Dispatch Nd batch norm forward to the 2d MIOpen kernel.

    The mode argument exists for interface compatibility and is ignored here.
    """
    return MIOpen.batchNorm2d(data, scale, bias, mean, var, epsilon, factor, test, out=out)
def wrapCrossMapLRNBackward(data, outdata, grad, workspace, N, alpha, beta, K):
    """Backward pass of cross-map local response normalization via MIOpen."""
    return MIOpen.lrnBackward(
        data, outdata, grad, workspace, MIOpen.LRNMode.cross, N, alpha, beta, K
    )
def wrapCrossMapLRN(data, N, alpha, beta, K, test):
    """Cross-map local response normalization via MIOpen.

    In test mode a (result, None) pair is returned instead of the bare result,
    keeping the return shape uniform for callers that expect a second slot.
    """
    result = MIOpen.lrn(data, MIOpen.LRNMode.cross, N, alpha, beta, K, test)

    if test:
        return result, None

    return result
def wrapMapLRNBackward(data, outdata, grad, means, workspace, N, alpha, beta, K):
    """Backward pass of map-mode local response normalization via MIOpen.

    Precomputed means are not supported by this backend: means must be None.
    """
    assert means is None

    return MIOpen.lrnBackward(
        data, outdata, grad, workspace, MIOpen.LRNMode.map, N, alpha, beta, K
    )
def wrapMapLRN(data, means, N, alpha, beta, K, test):
    """Map-mode local response normalization via MIOpen.

    Precomputed means are not supported (means must be None). In test mode a
    (result, None) pair is returned instead of the bare result.
    """
    assert means is None

    result = MIOpen.lrn(data, MIOpen.LRNMode.map, N, alpha, beta, K, test)

    if test:
        return result, None

    return result
def wrapPoolNd(data, size, stride, pad, mode, test):
    """Dispatch Nd pooling to the 2d MIOpen kernel.

    In test mode a (result, None) pair is returned instead of the bare result.
    """
    result = MIOpen.pool2d(data, size, stride, pad, mode, test)

    if test:
        return result, None

    return result
def wrapConvNdBackwardData(grad, W, data, stride, pad, dilation, groups, algo):
    """Data gradient of the 2d convolution via MIOpen.

    Only unit dilation and a single group are supported.
    """
    assert dilation == (1, 1)
    assert groups == 1

    return MIOpen.conv2dBackwardData(grad, W, data, stride, pad, algo=algo)
def wrapConvNd(data, W, bias, stride, pad, dilation, groups, algo):
    """Forward 2d convolution via MIOpen.

    Only unit dilation and a single group are supported; dilation and groups
    are accepted for interface compatibility.
    """
    assert dilation == (1, 1)
    assert groups == 1

    return MIOpen.conv2d(data, W, bias, stride, pad, algo=algo)