Example #1
def instanceNorm2d(data, scale, bias, epsilon=1e-5):
    # np, Utils, Driver, queue, memPool and MIOpen come from the surrounding
    # backend module.
    batchsize = data.shape[0]

    if batchsize > 1:
        # Replicate the per-channel affine parameters once per sample so they
        # match the folded (1, batchsize * maps, H, W) layout built below.
        extscale = Utils.tile(scale, batchsize, axis=1)
        extbias = Utils.tile(bias, batchsize, axis=1)
    else:
        extscale = scale
        extbias = bias

    # Fold the batch dimension into the channel dimension: batch norm on this
    # view computes per-(sample, channel) statistics, i.e. instance norm.
    indata = data.reshape(1, batchsize * data.shape[1], data.shape[2], data.shape[3])

    mean = Driver.empty(queue, (1, indata.shape[1], 1, 1), dtype=np.float32,
                        allocator=memPool)
    var = Driver.empty(queue, (1, indata.shape[1], 1, 1), dtype=np.float32,
                       allocator=memPool)

    outdata, savemean, saveinvvar = MIOpen.batchNorm2d(indata, extscale, extbias,
                                                       mean, var, epsilon, test=False)

    return outdata.reshape(data.shape), savemean, saveinvvar, extscale
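The reshape is the whole trick here: folding the batch dimension into the channel dimension turns per-sample instance statistics into per-channel batch statistics over a batch of one. A minimal NumPy sketch that checks this equivalence (hypothetical shapes, no MIOpen or device code involved):

import numpy as np

np.random.seed(0)
n, c, h, w = 4, 3, 5, 5
x = np.random.randn(n, c, h, w).astype(np.float32)
eps = 1e-5

# Direct instance norm: statistics per sample and per channel, over H x W.
direct = (x - x.mean(axis=(2, 3), keepdims=True)) \
         / np.sqrt(x.var(axis=(2, 3), keepdims=True) + eps)

# Folded path: batch norm over a (1, n * c, h, w) view, then unfold.
y = x.reshape(1, n * c, h, w)
folded = ((y - y.mean(axis=(2, 3), keepdims=True))
          / np.sqrt(y.var(axis=(2, 3), keepdims=True) + eps)).reshape(n, c, h, w)

assert np.allclose(direct, folded)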
Example #2
def instanceNorm2dBackward(grad, data, extscale, savemean, saveinvvar, epsilon,
                           affine=True):
    batchsize, maps = grad.shape[:2]

    # Same batch-into-channels fold as in the forward pass.
    outgrad = grad.reshape(1, batchsize * grad.shape[1], grad.shape[2], grad.shape[3])
    indata = data.reshape(1, batchsize * data.shape[1], data.shape[2], data.shape[3])

    ingrad, scalegrad, bgrad = MIOpen.batchNorm2dBackward(indata, outgrad, extscale,
                                                          savemean, saveinvvar, epsilon)

    if affine and batchsize > 1:
        # Affine gradients come back per (sample, channel); sum over the batch
        # to recover one gradient per channel.
        scalegrad = CLBlas.sumOnMatrix(scalegrad.reshape(batchsize, -1)).reshape(1, maps, 1, 1)
        bgrad = CLBlas.sumOnMatrix(bgrad.reshape(batchsize, -1)).reshape(1, maps, 1, 1)

    if affine:
        return ingrad.reshape(grad.shape), scalegrad, bgrad
    else:
        return ingrad.reshape(grad.shape)
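The backward pass undoes the same fold, with one extra reduction: batchNorm2dBackward yields one affine gradient per (sample, channel) pair, and summing over the batch recovers the per-channel gradients. A NumPy sketch of that reduction (hypothetical shapes; judging from the reshapes above, CLBlas.sumOnMatrix performs the equivalent column-wise sum on the device):

import numpy as np

n, c = 4, 3  # hypothetical batch size and channel count

# Per-(sample, channel) scale gradients in the folded (1, n * c, 1, 1) layout.
scalegrad = np.random.randn(1, n * c, 1, 1).astype(np.float32)

# reshape(batchsize, -1) -> sum over the batch axis -> reshape(1, maps, 1, 1)
per_channel = scalegrad.reshape(n, c).sum(axis=0).reshape(1, c, 1, 1)
print(per_channel.shape)  # (1, 3, 1, 1)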
Example #3
def wrapBatchNormNdBackward(data, grad, scale, savemean, saveinvvar, epsilon,
                            mode=None):
    # mode is accepted for interface compatibility and ignored.
    return MIOpen.batchNorm2dBackward(data, grad, scale, savemean, saveinvvar,
                                      epsilon)
Example #4
def wrapDeconvNd(data, W, bias, stride, pad, dilation, groups, algo):
    assert dilation == (1, 1) and groups == 1
    return MIOpen.conv2d(data, W, bias, stride, pad,
                         mode=MIOpen.ConvMode.transpose, algo=algo)
Example #5
def wrapConvNdbenchmark(datashape, Wshape, stride, pad, dilation, groups,
                        transpose):
    assert dilation == (1, 1) and groups == 1
    mode = MIOpen.ConvMode.transpose if transpose else MIOpen.ConvMode.conv
    return MIOpen.conv2dbenchmark(datashape, Wshape, stride, pad, mode=mode)
Example #6
def wrapDeconvNdBackwardData(grad, W, data, stride, pad, dilation, groups, algo):
    assert dilation == (1, 1) and groups == 1
    return MIOpen.conv2dBackwardData(grad, W, data, stride, pad,
                                     mode=MIOpen.ConvMode.transpose, algo=algo)
Example #7
def wrapConvNdBackwardParams(data, grad, W, bias, stride, pad, dilation, groups,
                             wgrad, bgrad, scale, momentum, algo):
    assert dilation == (1, 1) and groups == 1
    return MIOpen.conv2dBackwardParams(data, grad, W, bias, stride, pad,
                                       wgrad, bgrad, scale, momentum, algo=algo)
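The scale and momentum arguments suggest the parameter-gradient buffers are blended rather than overwritten. Assuming the usual alpha/beta-style update (an assumption; the snippet itself does not confirm the exact semantics), the accumulation would amount to:

import numpy as np

# Assumed semantics: wgrad := scale * dW + momentum * wgrad, where dW is the
# freshly computed weight gradient. All shapes here are hypothetical.
wgrad = np.zeros((64, 32, 3, 3), dtype=np.float32)
dW = np.random.randn(*wgrad.shape).astype(np.float32)

scale, momentum = 1.0, 0.9
wgrad = scale * dW + momentum * wgrad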
Example #8
def timeConv(datashape, Wshape, stride, pad):
    fwdResults, bwdFilterResults, bwdDataResults = MIOpen.conv2dbenchmark(
        datashape, Wshape, stride, pad)

    formatstr = "%-40s %-25s %-28s"

    print("Forward results:")
    for res in fwdResults:
        print(formatstr % ("Algo %s" % MIOpen.ConvFwdAlgo(res.algo),
                           "time %.6f secs" % res.time,
                           "memory %.6f mbytes" % (res.memory / 1024**2)))

    print("\nBackward filter results:")
    for res in bwdFilterResults:
        print(formatstr % ("Algo %s" % MIOpen.ConvBwdFilterAlgo(res.algo),
                           "time %.6f secs" % res.time,
                           "memory %.6f mbytes" % (res.memory / 1024**2)))

    print("\nBackward data results:")
    for res in bwdDataResults:
        print(formatstr % ("Algo %s" % MIOpen.ConvBwdDataAlgo(res.algo),
                           "time %.6f secs" % res.time,
                           "memory %.6f mbytes" % (res.memory / 1024**2)))
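A hypothetical invocation, with shapes in the usual NCHW and (outMaps, inMaps, kH, kW) conventions:

# Benchmark a 3x3 convolution: batch of 16, 32 -> 64 maps, 64x64 inputs.
timeConv(datashape=(16, 32, 64, 64), Wshape=(64, 32, 3, 3), stride=(1, 1),
         pad=(1, 1))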
Example #9
def wrapBatchNormNd(data, scale, bias, mean, var, epsilon, factor, test,
                    mode=None, out=None):
    # mode is accepted for interface compatibility and ignored: only the 2D
    # batch norm exists in this backend.
    return MIOpen.batchNorm2d(data, scale, bias, mean, var, epsilon, factor,
                              test, out=out)
Example #10
def wrapCrossMapLRNBackward(data, outdata, grad, workspace, N, alpha, beta, K):
    return MIOpen.lrnBackward(data, outdata, grad, workspace,
                              MIOpen.LRNMode.cross, N, alpha, beta, K)
Example #11
def wrapCrossMapLRN(data, N, alpha, beta, K, test):
    result = MIOpen.lrn(data, MIOpen.LRNMode.cross, N, alpha, beta, K, test)
    # In test mode the caller expects a pair; None fills the unused slot.
    return result if not test else (result, None)
Example #12
def wrapMapLRNBackward(data, outdata, grad, means, workspace, N, alpha, beta, K):
    assert means is None
    return MIOpen.lrnBackward(data, outdata, grad, workspace,
                              MIOpen.LRNMode.map, N, alpha, beta, K)
Example #13
def wrapMapLRN(data, means, N, alpha, beta, K, test):
    assert means is None
    result = MIOpen.lrn(data, MIOpen.LRNMode.map, N, alpha, beta, K, test)
    return result if not test else (result, None)
Example #14
def wrapPoolNd(data, size, stride, pad, mode, test):
    result = MIOpen.pool2d(data, size, stride, pad, mode, test)
    return result if not test else (result, None)
Example #15
def wrapConvNdBackwardData(grad, W, data, stride, pad, dilation, groups, algo):
    assert dilation == (1, 1) and groups == 1
    return MIOpen.conv2dBackwardData(grad, W, data, stride, pad, algo=algo)
Example #16
def wrapConvNd(data, W, bias, stride, pad, dilation, groups, algo):
    assert dilation == (1, 1) and groups == 1
    return MIOpen.conv2d(data, W, bias, stride, pad, algo=algo)