def __init__(self, input_layer, hidden_size=1, dropout_rate=0.,
             reverse_sequence=False, activation='relu', name=None):
    # Wrapping an already-created internal layer: adopt it as-is.
    if type(input_layer) is PythonWrapper.IndRnn:
        super().__init__(input_layer)
        return

    layers, outputs = check_input_layers(input_layer, 1)

    if hidden_size <= 0:
        raise ValueError('The `hidden_size` must be > 0.')

    if dropout_rate >= 1.:
        raise ValueError('The `dropout_rate` must be < 1.')

    if activation != 'sigmoid' and activation != 'relu':
        raise ValueError('The `activation` must be one of {`sigmoid`, `relu`}.')

    # Create the internal layer and attach it to its inputs.
    internal = PythonWrapper.IndRnn(str(name), layers, outputs, int(hidden_size),
                                    float(dropout_rate), bool(reverse_sequence),
                                    str(activation))
    super().__init__(internal)
# Variant of the constructor without the configurable `activation` parameter.
def __init__(self, input_layer, hidden_size=1, dropout_rate=0.,
             reverse_sequence=False, name=None):
    # Wrapping an already-created internal layer: adopt it as-is.
    if type(input_layer) is PythonWrapper.IndRnn:
        super().__init__(input_layer)
        return

    layers, outputs = check_input_layers(input_layer, 1)

    if hidden_size <= 0:
        raise ValueError('The `hidden_size` must be > 0.')

    if dropout_rate >= 1.:
        raise ValueError('The `dropout_rate` must be < 1.')

    internal = PythonWrapper.IndRnn(str(name), layers, outputs, int(hidden_size),
                                    float(dropout_rate), bool(reverse_sequence))
    super().__init__(internal)
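# A minimal usage sketch (illustrative, assuming NeoML's public Python API;
# the names `neoml.MathEngine.CpuMathEngine`, `neoml.Dnn.Dnn`, `neoml.Dnn.Source`,
# `neoml.Dnn.Sink`, and `neoml.Dnn.IndRnn` are not defined in this file and are
# shown only to demonstrate how this constructor is typically invoked):
#
#     import neoml
#
#     math_engine = neoml.MathEngine.CpuMathEngine()
#     dnn = neoml.Dnn.Dnn(math_engine)
#     data = neoml.Dnn.Source(dnn, 'data')  # sequence input
#     rnn = neoml.Dnn.IndRnn(data, hidden_size=64, dropout_rate=0.2,
#                            reverse_sequence=False, activation='relu',
#                            name='indrnn')
#     sink = neoml.Dnn.Sink(rnn, 'sink')    # fetch the recurrent output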