def _softmax(raw, input, dim=None, _stacklevel=3):
    """Trace ``F.softmax``: run the original op, then record a Caffe Softmax layer.

    Args:
        raw: the original (unpatched) ``F.softmax`` callable.
        input: input tensor; its blob name must already be registered in ``log``.
        dim: softmax axis; when ``None`` it is inferred the same way
            ``F.softmax`` itself does, so the recorded Caffe axis matches.
        _stacklevel: forwarded to ``F._get_softmax_dim`` (warning machinery).

    Returns:
        The tensor produced by the original softmax call.
    """
    # for F.softmax
    x = raw(input, dim=dim)
    if dim is None:
        # Mirror F.softmax's own default-axis inference so the Caffe
        # softmax_param.axis agrees with what PyTorch actually computed.
        dim = F._get_softmax_dim('softmax', input.dim(), _stacklevel)
    name = log.add_layer(name='softmax')
    log.add_blobs([x], name='softmax_blob')
    layer = caffe_net.Layer_param(name=name, type='Softmax',
                                  bottom=[log.get_blobs(input)],
                                  top=[log.get_blobs(x)])
    layer.param.softmax_param.axis = dim
    log.cnet.add_layer(layer)
    return x
def _softmax(translog, raw, input, dim=None, _stacklevel=3, torch_name=None):
    """Trace ``F.softmax`` and record a Caffe Softmax layer into *translog*.

    NOTE(review): this shares its name with the other ``_softmax`` hook; if
    both definitions live in the same module the later one shadows the
    earlier — confirm they are registered from separate modules/versions.

    Args:
        translog: translation log object holding ``blobs`` (id -> blob name),
            ``add_layer``/``add_blobs`` and the Caffe net ``cnet``.
        raw: the original (unpatched) ``F.softmax`` callable.
        input: input tensor; its blob must already be registered in
            ``translog.blobs``.
        dim: softmax axis; inferred as ``F.softmax`` does when ``None``.
        _stacklevel: forwarded to ``F._get_softmax_dim``.
        torch_name: optional original PyTorch layer name for the log.

    Returns:
        The tensor produced by the original softmax call.
    """
    # for F.softmax
    x = raw(input, dim=dim)
    if dim is None:
        # Keep the recorded Caffe axis in sync with PyTorch's default choice.
        dim = F._get_softmax_dim("softmax", input.dim(), _stacklevel)
    # Resolve the bottom blob name before registering the output blob, so a
    # possible id() reuse between input and output cannot corrupt the lookup.
    bottom_blobs = [translog.blobs[id(input)]]
    name = translog.add_layer(name="softmax", torch_name=torch_name)
    translog.add_blobs([x], name="softmax_blob")
    layer = caffe_net.Layer_param(name=name, type="Softmax",
                                  bottom=bottom_blobs,
                                  top=[translog.blobs[id(x)]])
    layer.param.softmax_param.axis = dim
    translog.cnet.add_layer(layer)
    return x