def test_validation():
    """Mismatched X/Y sample data passed to initialize() must be rejected
    with DataValidationError by the shape-checking validation layer."""
    model = chain(Relu(10), Relu(10), with_ragged(reduce_max()), Softmax())
    # Each of these X samples is incompatible with the 2d float Y sample:
    # a 2d array where ragged input is expected, a 3d array, and a list.
    bad_inputs = [
        model.ops.alloc2f(1, 10),
        model.ops.alloc3f(1, 10, 1),
        [model.ops.alloc2f(1, 10)],
    ]
    for X in bad_inputs:
        with pytest.raises(DataValidationError):
            model.initialize(X=X, Y=model.ops.alloc2f(1, 10))
def build_mean_max_reducer(hidden_size: int) -> Model[Ragged, Floats2d]:
    """Reduce sequences by concatenating their last, first, mean and max
    pooled vectors, and then combine the concatenated vectors with a
    hidden Maxout layer.

    hidden_size (int): output width (nO) of the Maxout layer.
    RETURNS: a model mapping Ragged sequences to Floats2d, one row per
        sequence.
    """
    # NOTE(review): the original docstring only mentioned mean and max
    # pooling, but four reducers are concatenated here; the docstring is
    # corrected to match the code.
    pooling = concatenate(reduce_last(), reduce_first(), reduce_mean(), reduce_max())
    return chain(
        pooling,
        Maxout(nO=hidden_size, normalize=True, dropout=0.0),
    )
def test_reduce_max(Xs):
    """reduce_max should return one row per sequence, holding the
    elementwise maximum, and its backprop must restore the input shape."""
    model = reduce_max()
    seq_lens = model.ops.asarray([seq.shape[0] for seq in Xs], dtype="i")
    ragged_in = Ragged(model.ops.flatten(Xs), seq_lens)
    Y, backprop = model(ragged_in, is_train=True)
    assert isinstance(Y, numpy.ndarray)
    n_seqs, width = len(Xs), Xs[0].shape[1]
    assert Y.shape == (n_seqs, width)
    assert Y.dtype == Xs[0].dtype
    # The fixture's second row of each sequence holds the largest values,
    # so the max-pooled output should equal that row.
    assert list(Y[0]) == list(Xs[0][1])
    assert list(Y[1]) == list(Xs[1][1])
    dX = backprop(Y)
    assert dX.dataXd.shape == ragged_in.dataXd.shape
from thinc.api import chain, ReLu, reduce_max, Softmax, add

# Type-checker fixture: composing reduce_max after ReLu mismatches the
# layers' input/output types, so these compositions should be flagged.
bad_model = chain(ReLu(10), reduce_max(), Softmax())

bad_model2 = add(ReLu(10), reduce_max(), Softmax())
from thinc.api import chain, Relu, reduce_max, Softmax, add

# Type-checker fixture: well-typed compositions whose revealed types
# should be exact, followed by mistyped ones that slip past the plugin
# because the mismatch occurs beyond the arity it inspects.
good_model = chain(Relu(10), Relu(10), Softmax())
reveal_type(good_model)

good_model2 = add(Relu(10), Relu(10), Softmax())
reveal_type(good_model2)

bad_model_undetected = chain(Relu(10), Relu(10), reduce_max(), Softmax())
reveal_type(bad_model_undetected)

bad_model_undetected2 = add(Relu(10), Relu(10), reduce_max(), Softmax())
reveal_type(bad_model_undetected2)
def test_init_reduce_max():
    """Smoke test: constructing a reduce_max layer must not raise."""
    model = reduce_max()