def __init__(self, delta, inputs, filters, fprop, **kwargs):
    """
    Arguments:
        delta   : gradient tensor propagated back from the layer above.
        inputs  : input tensor.
        filters : filter/kernel tensor.
        fprop   : the forward convolution op this bprop corresponds to.
    """
    super(bprop_conv, self).__init__(args=(ContiguousOp(delta), ContiguousOp(filters)),
                                     fprop=fprop,
                                     axes=inputs.axes,
                                     **kwargs)
def visit(self, op, input1, input2):
    """
    Elementwise Add requires both inputs to be in contiguous layout.
    """
    replace = False
    if not isinstance(input1, ContiguousOp):
        input1 = ContiguousOp(input1)
        replace = True
    if not isinstance(input2, ContiguousOp):
        input2 = ContiguousOp(input2)
        replace = True
    if replace:
        self.replace_op(op, Add(input1, input2))
def visit(self, op, inputs):
    """
    MKLDNN Pooling implementation requires contiguous layout.
    """
    if not isinstance(inputs, ContiguousOp):
        new_op = PoolingOp(op.pool_params, ContiguousOp(inputs), axes=op.axes)
        self.replace_op(op, new_op)
def visit(self, op):
    """
    Convolution implementation requires contiguous layout.
    """
    inputs, filters = op.args
    replace = False
    if not isinstance(inputs, ContiguousOp):
        inputs = ContiguousOp(inputs)
        replace = True
    if not isinstance(filters, ContiguousOp):
        filters = ContiguousOp(filters)
        replace = True
    if replace:
        self.replace_op(
            op, ConvolutionOp(op.conv_params, inputs, filters, axes=op.axes))
def visit(self, op, *args):
    """
    Warp-CTC requires all args to be contiguous.
    """
    args = list(args)
    replace = False
    for ii, arg in enumerate(args):
        if not is_contiguous(arg):
            args[ii] = ContiguousOp(arg)
            replace = True
    if replace:
        self.replace_op(op, CTCOp(*args, axes=op.axes))
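
# `is_contiguous` is used by the Warp-CTC visitor above but is not defined in
# this excerpt. The sketch below is a minimal, hypothetical stand-in that
# assumes an argument counts as contiguous when it is already wrapped in a
# ContiguousOp; the check in the actual codebase may instead inspect the op's
# tensor layout, so treat this as illustrative only.
def is_contiguous(arg):
    # Sketch: treat anything already wrapped in ContiguousOp as contiguous.
    return isinstance(arg, ContiguousOp)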
def visit(self, op, delta, inputs, dbias=None):
    replace = False
    # If we have updated op.fprop in this pass, replace this op
    # with a version with the replaced fprop.
    fprop = op.fprop
    replacement_fprop = self.get_replacement(fprop)
    if replacement_fprop is not None:
        replace = True
        fprop = replacement_fprop
    if not isinstance(delta, ContiguousOp):
        delta = ContiguousOp(delta)
        replace = True
    # if not isinstance(inputs, ContiguousOp):
    #     inputs = ContiguousOp(inputs)
    #     replace = True
    if replace:
        self.replace_op(op,
                        update_conv(delta, inputs, self.op_arg(fprop, 1),
                                    fprop, op.dbias))
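
# Each visitor above repeats the same bookkeeping: wrap every non-contiguous
# argument in a ContiguousOp and rebuild the op only if something changed.
# The helper below is a hypothetical sketch of that shared pattern (the name
# `wrap_contiguous` does not exist in the original codebase); it is included
# only to make the common structure explicit.
def wrap_contiguous(args):
    """Return (wrapped_args, changed), wrapping any non-ContiguousOp arg."""
    wrapped = []
    changed = False
    for arg in args:
        if not isinstance(arg, ContiguousOp):
            arg = ContiguousOp(arg)
            changed = True
        wrapped.append(arg)
    return wrapped, changed

# Example (hypothetical): the Add visitor above could then be written as
#     (input1, input2), changed = wrap_contiguous([input1, input2])
#     if changed:
#         self.replace_op(op, Add(input1, input2))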