Example #1
    def __init__(self,
                 input_layer,
                 hidden_size=1,
                 identity_scale=1.,
                 input_weight_std=1e-3,
                 reverse_seq=False,
                 name=None):

        if type(input_layer) is PythonWrapper.Irnn:
            super().__init__(input_layer)
            return

        layers, outputs = check_input_layers(input_layer, 1)

        if hidden_size <= 0:
            raise ValueError('The `hidden_size` must be > 0.')

        if identity_scale < 0:
            raise ValueError('The `identity_scale` must be >= 0.')

        if input_weight_std < 0:
            raise ValueError('The `input_weight_std` must be >= 0.')

        internal = PythonWrapper.Irnn(str(name), layers, outputs,
                                      int(hidden_size), float(identity_scale),
                                      float(input_weight_std),
                                      bool(reverse_seq))
        super().__init__(internal)
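For context, a minimal usage sketch of this constructor. The graph setup below (math engine, `Source`, `Sink`) follows the usual NeoML Python pattern but is an assumption, not part of the example above; layer names are illustrative.

    # Hypothetical usage sketch; assumes the standard neoml package layout.
    import neoml

    math_engine = neoml.MathEngine.CpuMathEngine(1)   # single-threaded CPU engine
    dnn = neoml.Dnn.Dnn(math_engine)
    data = neoml.Dnn.Source(dnn, 'data')              # sequence input
    irnn = neoml.Dnn.Irnn(data, hidden_size=16, identity_scale=1.,
                          input_weight_std=1e-3, name='irnn')
    sink = neoml.Dnn.Sink(irnn, 'sink')               # fetches the IRNN output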
Example #2
    def __init__(self,
                 input_layer,
                 head_count,
                 hidden_size,
                 output_size,
                 dropout_rate,
                 name=None):

        if type(input_layer) is PythonWrapper.MultiheadAttention:
            super().__init__(input_layer)
            return

        layers, outputs = check_input_layers(input_layer, (3, 4))

        if head_count < 1:
            raise ValueError('The `head_count` must be > 0.')

        if hidden_size < 1:
            raise ValueError('The `hidden_size` must be > 0.')

        if output_size < 1:
            raise ValueError('The `output_size` must be > 0.')

        if dropout_rate < 0 or dropout_rate >= 1:
            raise ValueError('The `dropout_rate` must be in [0, 1).')

        internal = PythonWrapper.MultiheadAttention(str(name), layers, outputs,
                                                    int(head_count),
                                                    int(hidden_size),
                                                    int(output_size),
                                                    float(dropout_rate))
        super().__init__(internal)
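Note the `check_input_layers(input_layer, (3, 4))` call: the layer expects query, key and value inputs, plus an optional mask. A hedged sketch, reusing the setup from the Example #1 sketch; `q`, `k` and `v` stand for layers producing those sequences:

    # Hypothetical usage; q, k, v are assumed to be previously created layers.
    att = neoml.Dnn.MultiheadAttention((q, k, v), head_count=4, hidden_size=64,
                                       output_size=64, dropout_rate=0.1, name='att')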
Example #3
    def __init__(self,
                 input_layer,
                 class_count,
                 padding=0,
                 dropout_rate=0.0,
                 name=None):

        if type(input_layer) is PythonWrapper.Crf:
            super().__init__(input_layer)
            return

        layers, outputs = check_input_layers(input_layer, (1, 2))

        if class_count < 1:
            raise ValueError('The `class_count` must be > 0.')

        if padding < 0:
            raise ValueError('The `padding` must be >= 0.')

        if float(dropout_rate) < 0 or float(dropout_rate) >= 1:
            raise ValueError('The `dropout_rate` must be in [0, 1).')

        internal = PythonWrapper.Crf(str(name), layers, outputs, int(class_count),
                                     int(padding), float(dropout_rate))
        super().__init__(internal)
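`check_input_layers(input_layer, (1, 2))` allows one or two inputs; the second, optional input presumably carries the correct label sequences used during training. A hedged sketch with illustrative layer names:

    # Hypothetical usage; `features` and `labels` are assumed upstream layers.
    crf = neoml.Dnn.Crf((features, labels), class_count=10, padding=0,
                        dropout_rate=0.2, name='crf')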
Example #4
    def __init__(self, input_layer, transforms, name=None):

        if type(input_layer) is PythonWrapper.Transform:
            super().__init__(input_layer)
            return

        layers, outputs = check_input_layers(input_layer, 1)

        if len(transforms) != 7:
            raise ValueError('The `transforms` array must have 7 elements.')

        operations = numpy.ones(7, numpy.int32)
        parameters = numpy.ones(7, numpy.int32)
        for i, transform in enumerate(transforms):
            if len(transform) != 2:
                raise ValueError(
                    'The `transforms` array must contain pairs (operation, value).'
                )

            operations[i] = self.rules.index(transform[0])
            if transform[1] < 0:
                raise ValueError('All values in `transforms` must be >= 0.')
            parameters[i] = transform[1]

        internal = PythonWrapper.Transform(str(name), layers[0],
                                           int(outputs[0]), operations,
                                           parameters)
        super().__init__(internal)
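Each of the seven (operation, value) pairs addresses one blob dimension, and the operation name is looked up in the layer's `rules` list. A hedged sketch; the rule strings below ('set', 'multiply', 'remainder') are assumptions for illustration, so check `rules` for the actual vocabulary:

    # Hypothetical usage; one (rule, value) pair per blob dimension.
    transform = neoml.Dnn.Transform(source,
                                    [('remainder', 1), ('set', 1), ('set', 1),
                                     ('set', 1), ('set', 1), ('set', 1),
                                     ('multiply', 2)],
                                    name='transform')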
Example #5
    def __init__(self,
                 input_layer,
                 hidden_size=1,
                 dropout_rate=0.,
                 reverse_sequence=False,
                 activation='relu',
                 name=None):

        if type(input_layer) is PythonWrapper.IndRnn:
            super().__init__(input_layer)
            return

        layers, outputs = check_input_layers(input_layer, 1)

        if hidden_size <= 0:
            raise ValueError('The `hidden_size` must be > 0.')
        if dropout_rate < 0 or dropout_rate >= 1.:
            raise ValueError('The `dropout_rate` must be in [0, 1).')
        if activation not in ('sigmoid', 'relu'):
            raise ValueError(
                'The `activation` must be one of {`sigmoid`, `relu`}.')

        internal = PythonWrapper.IndRnn(str(name), layers, outputs,
                                        int(hidden_size), float(dropout_rate),
                                        bool(reverse_sequence),
                                        str(activation))
        super().__init__(internal)
Example #6
    def __init__(self,
                 input_layer,
                 hidden_size=1,
                 dropout_rate=0.0,
                 activation="sigmoid",
                 reverse_seq=False,
                 name=None):

        if type(input_layer) is PythonWrapper.Lstm:
            super().__init__(input_layer)
            return

        layers, outputs = check_input_layers(input_layer, (1, 3))

        if hidden_size <= 0:
            raise ValueError('The `hidden_size` must be > 0.')

        if dropout_rate < 0 or dropout_rate >= 1:
            raise ValueError('The `dropout_rate` must be in [0, 1).')

        recurrent_activation_index = self.activations.index(activation)

        internal = PythonWrapper.Lstm(str(name), layers, outputs,
                                      int(hidden_size), float(dropout_rate),
                                      recurrent_activation_index,
                                      bool(reverse_seq))
        super().__init__(internal)
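Since `check_input_layers(input_layer, (1, 3))` accepts one to three inputs, the initial state blobs are optional. A hedged single-input sketch, continuing the setup from the Example #1 sketch:

    # Hypothetical usage; one sequence input, initial state inputs omitted.
    lstm = neoml.Dnn.Lstm(data, hidden_size=64, dropout_rate=0.2,
                          activation='sigmoid', reverse_seq=False, name='lstm')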
Example #7
    def __init__(self,
                 input_layers,
                 score,
                 hidden_size,
                 output_object_size,
                 output_seq_len,
                 name=None):

        if type(input_layers) is PythonWrapper.AttentionDecoder:
            super().__init__(input_layers)
            return

        layers, outputs = check_input_layers(input_layers, 2)

        score_index = self.scores.index(score)

        if output_object_size <= 0:
            raise ValueError('`output_object_size` must be > 0.')

        if output_seq_len <= 0:
            raise ValueError('`output_seq_len` must be > 0.')

        if hidden_size <= 0:
            raise ValueError('`hidden_size` must be > 0.')

        internal = PythonWrapper.AttentionDecoder(str(name), layers[0],
                                                  int(outputs[0]), layers[1],
                                                  int(outputs[1]), score_index,
                                                  int(output_object_size),
                                                  int(output_seq_len),
                                                  int(hidden_size))
        super().__init__(internal)
Example #8
    def __init__(self, input_layers, dimensions=None, name=None):

        if dimensions is None:
            dimensions = [(1, 1)]

        if (not isinstance(dimensions, list)
                or any(not isinstance(d, tuple) for d in dimensions)
                or any(len(d) != 2 for d in dimensions)
                or any(d[0] < 0 or d[1] < 1 for d in dimensions)):
            raise ValueError(
                '`dimensions` must be a list of elements like (VectorCount, VectorSize).'
            )

        if type(input_layers) is PythonWrapper.MultichannelLookup:
            super().__init__(input_layers)
            return

        layers, outputs = check_input_layers(input_layers, 0)

        internal = PythonWrapper.MultichannelLookup(str(name), layers, outputs,
                                                    dimensions)
        super().__init__(internal)
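Each (VectorCount, VectorSize) pair describes one lookup table: how many embedding vectors it holds and their length. A hedged sketch with an assumed `ids` input layer:

    # Hypothetical usage; a single table of 100 embeddings, 32 floats each.
    lookup = neoml.Dnn.MultichannelLookup(ids, dimensions=[(100, 32)], name='lookup')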
Example #9
    def __init__(self,
                 input_layers,
                 head_count=1,
                 hidden_size=1,
                 dropout=0.,
                 feed_forward_size=1,
                 activation='relu',
                 name=None):

        if type(input_layers) is PythonWrapper.TransformerEncoder:
            super().__init__(input_layers)
            return

        if head_count < 1:
            raise ValueError('The `head_count` must be > 0.')

        if hidden_size < 1:
            raise ValueError('The `hidden_size` must be > 0.')
        
        if hidden_size % head_count != 0:
            raise ValueError('The `hidden_size` must be a multiple of `head_count`.')

        if dropout < 0 or dropout >= 1.:
            raise ValueError('The `dropout` must be in [0, 1).')

        if feed_forward_size < 1:
            raise ValueError('The `feed_forward_size` must be > 0.')

        if activation not in self.activations:
            raise ValueError('The `activation` has an invalid value.')

        activation = self.activations.index(activation)
 
        layers, outputs = check_input_layers(input_layers, (1, 2))

        internal = PythonWrapper.TransformerEncoder(str(name), layers, outputs,
            int(head_count), int(hidden_size), float(dropout), int(feed_forward_size), int(activation))
        super().__init__(internal)
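A hedged sketch; note that `hidden_size` must divide evenly among the heads, per the modulus check above. The `data` layer is assumed from the Example #1 sketch:

    # Hypothetical usage; 64 hidden units split across 4 heads (64 % 4 == 0).
    encoder = neoml.Dnn.TransformerEncoder(data, head_count=4, hidden_size=64,
                                           dropout=0.1, feed_forward_size=128,
                                           activation='relu', name='encoder')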
Example #10
    def __init__(self, input_layer, name=None):
        if type(input_layer) is PythonWrapper.BertConv:
            super().__init__(input_layer)
            return

        layers, outputs = check_input_layers(input_layer, 2)

        internal = PythonWrapper.BertConv(str(name), layers, outputs)
        super().__init__(internal)
Example #11
    def __init__(self,
                 input_layers,
                 pooling_type='f',
                 hidden_size=1,
                 window_size=1,
                 stride=1,
                 paddings=(0, 0),
                 activation="tanh",
                 dropout=0.0,
                 mode="direct",
                 name=None):

        if type(input_layers) is PythonWrapper.Qrnn:
            super().__init__(input_layers)
            return

        pooling_type_index = self.pooling_types.index(pooling_type)

        if hidden_size < 1:
            raise ValueError('The `hidden_size` must be > 0.')

        if window_size < 1:
            raise ValueError('The `window_size` must be > 0.')

        if stride < 1:
            raise ValueError('The `stride` must be > 0.')

        if len(paddings) != 2:
            raise ValueError(
                'The `paddings` must have two values (padding_front, padding_back).'
            )

        padding_front = paddings[0]
        if padding_front < 0:
            raise ValueError('The `padding_front` must be >= 0.')

        padding_back = paddings[1]
        if padding_back < 0:
            raise ValueError('The `padding_back` must be >= 0.')

        activation_index = self.activations.index(activation)

        if dropout < 0 or dropout >= 1:
            raise ValueError('The `dropout` must be in [0, 1).')

        mode_index = self.recurrent_modes.index(mode)

        layers, outputs = check_input_layers(input_layers, (1, 2))

        internal = PythonWrapper.Qrnn(str(name),
                                      layers, int(pooling_type_index),
                                      int(hidden_size), int(window_size),
                                      int(stride), int(padding_front),
                                      int(padding_back), activation_index,
                                      float(dropout), mode_index, outputs)
        super().__init__(internal)
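A hedged sketch. The 'f' pooling type is the default shown in the signature; valid pooling and mode strings come from the layer's `pooling_types` and `recurrent_modes` lists, so treat the values below as assumptions:

    # Hypothetical usage; f-pooling with a width-2 convolution window.
    qrnn = neoml.Dnn.Qrnn(data, pooling_type='f', hidden_size=64, window_size=2,
                          stride=1, paddings=(1, 0), activation='tanh',
                          dropout=0.1, mode='direct', name='qrnn')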
Example #12
    def __init__(self, input_layers, name=None):

        if type(input_layers) is PythonWrapper.MatrixMultiplication:
            super().__init__(input_layers)
            return

        layers, outputs = check_input_layers(input_layers, 2)

        internal = PythonWrapper.MatrixMultiplication(str(name), layers[0], layers[1], int(outputs[0]), int(outputs[1]))
        super().__init__(internal)
Example #13
    def __init__(self, input_layer, alpha, name=None):

        if type(input_layer) is PythonWrapper.LeakyReLU:
            super().__init__(input_layer)
            return

        layers, outputs = check_input_layers(input_layer, 1)

        internal = PythonWrapper.LeakyReLU(str(name), layers[0], int(outputs[0]), float(alpha))
        super().__init__(internal)
Example #14
    def __init__(self, input_layers, name=None):

        if type(input_layers) is PythonWrapper.GlobalMeanPooling:
            super().__init__(input_layers)
            return

        layers, outputs = check_input_layers(input_layers, 1)

        internal = PythonWrapper.GlobalMeanPooling(str(name), layers[0], int(outputs[0]))
        super().__init__(internal)
Example #15
    def __init__(self, input_layer, multiplier, free_term, name=None):

        if type(input_layer) is PythonWrapper.LinearLayer:
            super().__init__(input_layer)
            return

        layers, outputs = check_input_layers(input_layer, 1)

        internal = PythonWrapper.LinearLayer(str(name), layers[0], int(outputs[0]), float(multiplier), float(free_term))
        super().__init__(internal)
Example #16
    def __init__(self, input_layers, softmax=True, loss_weight=1.0, name=None):

        if type(input_layers) is PythonWrapper.CrossEntropyLoss:
            super().__init__(input_layers)
            return

        layers, outputs = check_input_layers(input_layers, (2, 3))

        internal = PythonWrapper.CrossEntropyLoss(str(name), layers, outputs, bool(softmax), float(loss_weight))
        super().__init__(internal)
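Loss layers take the network output plus the labels, with optional per-object weights as a third input, hence `check_input_layers(input_layers, (2, 3))`. A hedged sketch with assumed `fc` and `labels` layers:

    # Hypothetical usage; classification output paired with a label source.
    loss = neoml.Dnn.CrossEntropyLoss((fc, labels), softmax=True,
                                      loss_weight=1.0, name='loss')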
Example #17
    def __init__(self, input_layer, exponent, name=None):

        if type(input_layer) is PythonWrapper.Power:
            super().__init__(input_layer)
            return

        layers, outputs = check_input_layers(input_layer, 1)

        internal = PythonWrapper.Power(str(name), layers[0], int(outputs[0]), float(exponent))
        super().__init__(internal)
Example #18
    def __init__(self, input_layer, name=None):

        if type(input_layer) is PythonWrapper.Sink:
            super().__init__(input_layer)
            return

        layers, outputs = check_input_layers(input_layer, 1)

        internal = PythonWrapper.Sink(str(name), layers[0], int(outputs[0]))
        super().__init__(internal)
Example #19
    def __init__(self, input_layers, class_count, rate, loss_weight=1.0, name=None):

        if type(input_layers) is PythonWrapper.CenterLoss:
            super().__init__(input_layers)
            return

        layers, outputs = check_input_layers(input_layers, (2, 3))

        internal = PythonWrapper.CenterLoss(str(name), layers, outputs, int(class_count), float(rate), float(loss_weight))
        super().__init__(internal)
Example #20
    def __init__(self, input_layer, slope, bias, name=None):

        if type(input_layer) is PythonWrapper.HardSigmoid:
            super().__init__(input_layer)
            return

        layers, outputs = check_input_layers(input_layer, 1)

        internal = PythonWrapper.HardSigmoid(str(name), layers[0], int(outputs[0]), float(slope), float(bias))
        super().__init__(internal)
Example #21
    def __init__(self, input_layers, positive_weight=1.0, loss_weight=1.0, name=None):

        if type(input_layers) is PythonWrapper.BinaryCrossEntropyLoss:
            super().__init__(input_layers)
            return

        layers, outputs = check_input_layers(input_layers, (2, 3))

        internal = PythonWrapper.BinaryCrossEntropyLoss(str(name), layers, outputs, float(positive_weight), float(loss_weight))
        super().__init__(internal)
Example #22
    def __init__(self, input_layers, reset=True, name=None):

        if type(input_layers) is PythonWrapper.Accuracy:
            super().__init__(input_layers)
            return

        layers, outputs = check_input_layers(input_layers, 2)

        internal = PythonWrapper.Accuracy(str(name), layers[0], layers[1], int(outputs[0]), int(outputs[1]), bool(reset))
        super().__init__(internal)
Example #23
    def __init__(self, input_layers, loss_weight=1.0, name=None):

        if type(input_layers) is PythonWrapper.MultiSquaredHingeLoss:
            super().__init__(input_layers)
            return

        layers, outputs = check_input_layers(input_layers, (2, 3))

        internal = PythonWrapper.MultiSquaredHingeLoss(str(name), layers, outputs, float(loss_weight))
        super().__init__(internal)
Example #24
    def __init__(self, input_layers, name=None):

        if type(input_layers) is PythonWrapper.EltwiseMul:
            super().__init__(input_layers)
            return

        layers, outputs = check_input_layers(input_layers, 0)

        internal = PythonWrapper.EltwiseMul(str(name), layers, outputs)
        super().__init__(internal)
Example #25
    def __init__(self, input_layer, threshold=0.0, name=None):

        if type(input_layer) is PythonWrapper.ReLU:
            super().__init__(input_layer)
            return  

        layers, outputs = check_input_layers(input_layer, 1)

        internal = PythonWrapper.ReLU(str(name), layers[0], int(outputs[0]), float(threshold))
        super().__init__(internal)
Example #26
    def __init__(self, input_layers, name=None):

        if type(input_layers) is PythonWrapper.BestSequence:
            super().__init__(input_layers)
            return

        layers, outputs = check_input_layers(input_layers, 2)

        internal = PythonWrapper.BestSequence(str(name), layers, outputs)
        super().__init__(internal)
Example #27
    def __init__(self, input_layers, name=None):

        if type(input_layers) is PythonWrapper.AddToObject:
            super().__init__(input_layers)
            return

        layers, outputs = check_input_layers(input_layers, 2)

        internal = PythonWrapper.AddToObject(str(name), layers[0], layers[1],
                                             int(outputs[0]), int(outputs[1]))
        super().__init__(internal)
Example #28
    def __init__(self, input_layer, enum_size, name=None):

        if type(input_layer) is PythonWrapper.EnumBinarization:
            super().__init__(input_layer)
            return

        layers, outputs = check_input_layers(input_layer, 1)

        internal = PythonWrapper.EnumBinarization(str(name), layers[0],
                                                  int(outputs[0]), int(enum_size))
        super().__init__(internal)
Example #29
    def __init__(self, input_layer, name=None):

        if type(input_layer) is PythonWrapper.ImageToPixel:
            super().__init__(input_layer)
            return

        layers, outputs = check_input_layers(input_layer, 2)

        internal = PythonWrapper.ImageToPixel(str(name), layers[0], layers[1],
                                              int(outputs[0]), int(outputs[1]))
        super().__init__(internal)
Example #30
    def __init__(self, input_layers, blank, skip, loss_weight=1.0, name=None):

        if type(input_layers) is PythonWrapper.CtcLoss:
            super().__init__(input_layers)
            return

        layers, outputs = check_input_layers(input_layers, (2, 5))

        internal = PythonWrapper.CtcLoss(str(name), layers, outputs,
                                         int(blank), bool(skip),
                                         float(loss_weight))
        super().__init__(internal)
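A hedged sketch. `blank` is the index reserved for the CTC blank label; the exact meaning of `skip` (allowing skips of blank labels) is an assumption here, so verify against the NeoML docs:

    # Hypothetical usage; `result` and `labels` are assumed upstream layers.
    ctc = neoml.Dnn.CtcLoss((result, labels), blank=0, skip=True, name='ctc')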