Code example #1
from chainer.functions.connection.convolution_2d import Convolution2DFunction
from nnoir_chainer.patch import patched_function_apply, patched_function_call

if hasattr(Convolution2DFunction, 'apply'):
    Convolution2DFunction.apply = patched_function_apply(
        Convolution2DFunction.apply)
else:
    Convolution2DFunction.__call__ = patched_function_call(
        Convolution2DFunction.__call__)
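
Every module in this listing follows the same pattern: the Chainer function class is monkey-patched so that each application can be recorded, and a to_nnoir_node method is attached that converts the recorded call into an NNOIR node. The hasattr(..., 'apply') branch covers Chainer's API split: newer releases implement functions as FunctionNode subclasses invoked through apply, while older releases use Function instances invoked through __call__. The snippet below is a minimal sketch of what such a wrapper might do, assuming it only needs to remember the input variables for a later graph pass (the attribute name chainer_input_variables is taken from the Pad example further down; the actual nnoir_chainer.patch implementation is not shown in this listing and may differ):

# Hypothetical sketch, not the actual nnoir_chainer.patch implementation.
def sketch_patched_apply(original_apply):
    def apply(self, inputs):
        # Remember the Chainer input variables on the function object so a
        # later pass can hand them to to_nnoir_node.
        self.chainer_input_variables = list(inputs)
        return original_apply(self, inputs)
    return apply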
Code example #2
from chainer.functions.activation.sigmoid import Sigmoid
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(Sigmoid, 'apply'):
    Sigmoid.apply = patched_function_apply(Sigmoid.apply)
else:
    Sigmoid.__call__ = patched_function_call(Sigmoid.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.Sigmoid([x.name for x in inputs], [x.name for x in outputs])


Sigmoid.to_nnoir_node = to_nnoir_node
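
As an illustration only (this snippet is not part of nnoir_chainer), to_nnoir_node needs nothing more than objects carrying a name for each input and output variable, so a lightweight stand-in is enough to see what the patched Sigmoid emits:

from collections import namedtuple

Var = namedtuple('Var', ['name'])

# Produces an NNOIR Sigmoid node connecting input 'x' to output 'h'.
node = Sigmoid().to_nnoir_node([Var('x')], [Var('h')])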
Code example #3
from chainer.functions.normalization.batch_normalization import FixedBatchNormalization
from nnoir_chainer.patch import patched_function_apply, patched_function_call

if hasattr(FixedBatchNormalization, 'apply'):
    FixedBatchNormalization.apply = patched_function_apply(FixedBatchNormalization.apply)
else:
    FixedBatchNormalization.__call__ = patched_function_call(FixedBatchNormalization.__call__)
Code example #4
from chainer.functions.pooling.unpooling_2d import Unpooling2D
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(Unpooling2D, 'apply'):
    Unpooling2D.apply = patched_function_apply(Unpooling2D.apply)
else:
    Unpooling2D.__call__ = patched_function_call(Unpooling2D.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.Unpooling2D(
        [x.name for x in inputs],
        [x.name for x in outputs],
        kh=self.kh,
        kw=self.kw,
        sy=self.sy,
        sx=self.sx,
        ph=self.ph,
        pw=self.pw,
        cover_all=self.cover_all,
        outh=self.outh,
        outw=self.outw,
    )


Unpooling2D.to_nnoir_node = to_nnoir_node
Code example #5
File: max_pooling_nd.py Project: yongjiankuang/nnoir
from chainer.functions.pooling.max_pooling_nd import MaxPoolingND
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(MaxPoolingND, 'apply'):
    MaxPoolingND.apply = patched_function_apply(MaxPoolingND.apply)
else:
    MaxPoolingND.__call__ = patched_function_call(MaxPoolingND.__call__)


def to_nnoir_node(self, inputs, outputs):
    if self.ndim != 2:
        raise Exception('unsupported ndim "{}"'.format(self.ndim))
    return NNOIR.MaxPooling2D(
        [x.name for x in inputs],
        [x.name for x in outputs],
        kernel=tuple(self.ksize[:2]),
        stride=tuple(self.stride[:2]),
        pad_h=(self.pad[0], self.pad[0] + self.stride[0] - 1),
        pad_w=(self.pad[1], self.pad[1] + self.stride[1] - 1),
    )


MaxPoolingND.to_nnoir_node = to_nnoir_node
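
One plausible reading of the asymmetric pad_h/pad_w above (not stated in the sources): Chainer's max pooling with cover_all computes the output length as ceil((in + 2*pad - k) / s) + 1, whereas pooling with explicit (before, after) padding uses floor((in + pad_before + pad_after - k) / s) + 1, so adding an extra s - 1 after the input lets the floor form cover the same windows. A small check of that arithmetic:

import math

in_, k, s, pad = 8, 3, 2, 0
cover_all_out = math.ceil((in_ + 2 * pad - k) / s) + 1                 # 4
asymmetric_out = math.floor((in_ + pad + (pad + s - 1) - k) / s) + 1   # 4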
Code example #6
File: basic_math.py Project: yongjiankuang/nnoir
from chainer.functions.math.basic_math import Add, AddConstant, Sub, Mul, MulConstant
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(Add, 'apply'):
    Add.apply = patched_function_apply(Add.apply)
else:
    Add.__call__ = patched_function_call(Add.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.Add([x.name for x in inputs], [x.name for x in outputs])


Add.to_nnoir_node = to_nnoir_node

if hasattr(AddConstant, 'apply'):
    AddConstant.apply = patched_function_apply(AddConstant.apply)
else:
    AddConstant.__call__ = patched_function_call(AddConstant.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.AddConstant([x.name for x in inputs], [x.name for x in outputs], value=float(self.value))


AddConstant.to_nnoir_node = to_nnoir_node

if hasattr(Sub, 'apply'):
    Sub.apply = patched_function_apply(Sub.apply)
else:
    Sub.__call__ = patched_function_call(Sub.__call__)
Code example #7
File: sum.py Project: yongjiankuang/nnoir
from chainer.functions.math.sum import Sum
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(Sum, 'apply'):
    Sum.apply = patched_function_apply(Sum.apply)
else:
    Sum.__call__ = patched_function_call(Sum.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.Sum([x.name for x in inputs], [x.name for x in outputs],
                     axes=self.axis,
                     keepdims=self.keepdims)


Sum.to_nnoir_node = to_nnoir_node
Code example #8
from chainer.functions.activation.clipped_relu import ClippedReLU
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(ClippedReLU, 'apply'):
    ClippedReLU.apply = patched_function_apply(ClippedReLU.apply)
else:
    ClippedReLU.__call__ = patched_function_call(ClippedReLU.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.ClippedReLU([x.name for x in inputs], [x.name for x in outputs], upper=self.cap)


ClippedReLU.to_nnoir_node = to_nnoir_node
Code example #9
File: transpose.py Project: yongjiankuang/nnoir
from chainer.functions.array.transpose import Transpose
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(Transpose, 'apply'):
    Transpose.apply = patched_function_apply(Transpose.apply)
else:
    Transpose.__call__ = patched_function_call(Transpose.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.Transpose([x.name for x in inputs], [x.name for x in outputs], axes=self.axes)


Transpose.to_nnoir_node = to_nnoir_node
Code example #10
from chainer.functions.connection.linear import LinearFunction
from nnoir_chainer.patch import patched_function_apply, patched_function_call

if hasattr(LinearFunction, 'apply'):
    LinearFunction.apply = patched_function_apply(LinearFunction.apply)
else:
    LinearFunction.__call__ = patched_function_call(LinearFunction.__call__)
Code example #11
from chainer.functions.array.reshape import Reshape
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(Reshape, 'apply'):
    Reshape.apply = patched_function_apply(Reshape.apply)
else:
    Reshape.__call__ = patched_function_call(Reshape.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.Reshape([x.name for x in inputs], [x.name for x in outputs],
                         shape=self.shape)


Reshape.to_nnoir_node = to_nnoir_node
Code example #12
from chainer.functions.activation.softmax import Softmax
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(Softmax, 'apply'):
    Softmax.apply = patched_function_apply(Softmax.apply)
else:
    Softmax.__call__ = patched_function_call(Softmax.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.Softmax([x.name for x in inputs], [x.name for x in outputs],
                         axis=self.axis)


Softmax.to_nnoir_node = to_nnoir_node
Code example #13
from chainer.functions.noise.dropout import Dropout
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(Dropout, 'apply'):
    Dropout.apply = patched_function_apply(Dropout.apply)
else:
    Dropout.__call__ = patched_function_call(Dropout.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.Dropout([x.name for x in inputs], [x.name for x in outputs])


Dropout.to_nnoir_node = to_nnoir_node
Code example #14
from chainer.functions.array.pad import Pad
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR
import numpy as np

if hasattr(Pad, 'apply'):
    Pad.apply = patched_function_apply(Pad.apply)
else:
    Pad.__call__ = patched_function_call(Pad.__call__)


def to_nnoir_node(self, inputs, outputs):
    [var] = self.chainer_input_variables
    zero_padded_ones = np.pad(np.ones(var.shape, dtype=np.int32),
                              self.pad_width,
                              mode='constant',
                              constant_values=0)
    ones_positions = np.transpose(np.where(zero_padded_ones > 0))
    pad_befores = ones_positions[0]
    pad_afters = np.array(zero_padded_ones.shape) - ones_positions[-1] - 1
    pad_width = list(
        map(lambda x: (int(x[0]), int(x[1])),
            np.stack([pad_befores, pad_afters]).T))
    if self.mode == 'constant':
        if type(self.keywords['constant_values']) == int:
            value = float(self.keywords['constant_values'])
        elif type(self.keywords['constant_values'][0]) == tuple:
            value = float(self.keywords['constant_values'][0][0])
            for (b, a) in self.keywords['constant_values']:
                if b != value or a != value:
                    raise Exception('unsupported pad value "{}"'.format(
                        self.keywords['constant_values']))
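
A small worked example of the pad-width recovery above (illustrative only, not part of the module): padding a mask of ones and locating the first and last nonzero positions gives back the per-axis (before, after) amounts.

import numpy as np

ones = np.ones((2, 3), dtype=np.int32)
padded = np.pad(ones, ((1, 1), (0, 2)), mode='constant', constant_values=0)
positions = np.transpose(np.where(padded > 0))
befores = positions[0]                               # array([1, 0])
afters = np.array(padded.shape) - positions[-1] - 1  # array([1, 2])
# Pairing them up recovers the original ((1, 1), (0, 2)) padding.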
Code example #15
from chainer.functions.activation.leaky_relu import LeakyReLU
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(LeakyReLU, 'apply'):
    LeakyReLU.apply = patched_function_apply(LeakyReLU.apply)
else:
    LeakyReLU.__call__ = patched_function_call(LeakyReLU.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.LeakyReLU([x.name for x in inputs], [x.name for x in outputs],
                           slope=self.slope)


LeakyReLU.to_nnoir_node = to_nnoir_node
Code example #16
from chainer.functions.normalization.local_response_normalization import LocalResponseNormalization
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(LocalResponseNormalization, 'apply'):
    LocalResponseNormalization.apply = patched_function_apply(
        LocalResponseNormalization.apply)
else:
    LocalResponseNormalization.__call__ = patched_function_call(
        LocalResponseNormalization.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.LocalResponseNormalization([x.name for x in inputs],
                                            [x.name for x in outputs],
                                            n=int(self.n),
                                            k=float(self.k),
                                            alpha=float(self.alpha),
                                            beta=float(self.beta))


LocalResponseNormalization.to_nnoir_node = to_nnoir_node
Code example #17
from chainer.functions.loss.softmax_cross_entropy import SoftmaxCrossEntropy
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(SoftmaxCrossEntropy, 'apply'):
    SoftmaxCrossEntropy.apply = patched_function_apply(
        SoftmaxCrossEntropy.apply)
else:
    SoftmaxCrossEntropy.__call__ = patched_function_call(
        SoftmaxCrossEntropy.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.SoftmaxCrossEntropy(
        [x.name for x in inputs],
        [x.name for x in outputs],
        normalize=self.normalize,
        cache_score=self.cache_score,
    )


SoftmaxCrossEntropy.to_nnoir_node = to_nnoir_node
Code example #18
from chainer.functions.array.concat import Concat
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(Concat, 'apply'):
    Concat.apply = patched_function_apply(Concat.apply)
else:
    Concat.__call__ = patched_function_call(Concat.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.Concat([x.name for x in inputs], [x.name for x in outputs],
                        axis=self.axis)


Concat.to_nnoir_node = to_nnoir_node
Code example #19
from chainer.functions.array.resize_images import ResizeImages
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(ResizeImages, 'apply'):
    ResizeImages.apply = patched_function_apply(ResizeImages.apply)
else:
    ResizeImages.__call__ = patched_function_call(ResizeImages.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.Bilinear2D([x.name for x in inputs], [x.name for x in outputs], size=(self.out_H, self.out_W))


ResizeImages.to_nnoir_node = to_nnoir_node
Code example #20
File: broadcast.py Project: yongjiankuang/nnoir
from chainer.functions.array.broadcast import BroadcastTo
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(BroadcastTo, 'apply'):
    BroadcastTo.apply = patched_function_apply(BroadcastTo.apply)
else:
    BroadcastTo.__call__ = patched_function_call(BroadcastTo.__call__)


def to_nnoir_node(self, inputs, outputs):
    output = outputs[0]
    return NNOIR.BroadcastTo([x.name for x in inputs],
                             [x.name for x in outputs],
                             shape=output.shape)


BroadcastTo.to_nnoir_node = to_nnoir_node
Code example #21
File: elu.py Project: yongjiankuang/nnoir
from chainer.functions.activation.elu import ELU
from nnoir_chainer.patch import patched_function_apply, patched_function_call
import nnoir.functions as NNOIR

if hasattr(ELU, 'apply'):
    ELU.apply = patched_function_apply(ELU.apply)
else:
    ELU.__call__ = patched_function_call(ELU.__call__)


def to_nnoir_node(self, inputs, outputs):
    return NNOIR.ELU([x.name for x in inputs], [x.name for x in outputs],
                     alpha=self.alpha)


ELU.to_nnoir_node = to_nnoir_node