Example No. 1
0
    # The dtype here matches what thinc is expecting -- which differs per
    # platform (by int definition). This should be fixed once the problem
    # is fixed on Thinc's side.
    lengths = ops.asarray([arr.shape[0] for arr in keys], dtype=numpy.int_)
    keys = ops.xp.concatenate(keys)
    vals = ops.allocate(keys.shape[0]) + 1
    return (keys, vals, lengths), None


# Thinc v6 declarative model description: on the first batch of data,
# run _set_dimensions_if_needed and then the model's own init_weights;
# the attributes decorator declares the layer's dimensions, parameters,
# and matching gradient slots by name.
@describe.on_data(_set_dimensions_if_needed,
                  lambda model, X, y: model.init_weights(model))
@describe.attributes(nI=Dimension("Input size"),
                     nF=Dimension("Number of features"),
                     nO=Dimension("Output size"),
                     nP=Dimension("Maxout pieces"),
                     # W has shape (nF, nO, nP, nI): one affine map per
                     # feature slot, with nP maxout pieces per output unit.
                     W=Synapses("Weights matrix", lambda obj:
                                (obj.nF, obj.nO, obj.nP, obj.nI)),
                     b=Biases("Bias vector", lambda obj: (obj.nO, obj.nP)),
                     # pad is a learned padding vector, normal-initialized,
                     # broadcast over the batch (leading dim 1).
                     pad=Synapses("Pad", lambda obj:
                                  (1, obj.nF, obj.nO, obj.nP),
                                  lambda M, ops: ops.normal_init(M, 1.)),
                     d_W=Gradient("W"),
                     d_pad=Gradient("pad"),
                     d_b=Gradient("b"))
class PrecomputableAffine(Model):
    """Affine layer with per-feature weights and maxout pieces, described
    declaratively via thinc.describe (see decorators above).

    NOTE(review): this snippet is truncated — only __init__ is visible here;
    the full class defines more methods in the original source.
    """
    def __init__(self, nO=None, nI=None, nF=None, nP=None, **kwargs):
        """Store the (possibly still-unknown) dimensions; any that are None
        are filled in later by _set_dimensions_if_needed on first data."""
        Model.__init__(self, **kwargs)
        self.nO = nO  # output size
        self.nP = nP  # maxout pieces
        self.nI = nI  # input size
        self.nF = nF  # number of features
Example No. 2
0
        lengths = self.ops.asarray([arr.shape[0] for arr in batch_keys],
                                   dtype=numpy.int_)
        batch_keys = self.ops.xp.concatenate(batch_keys)
        batch_vals = self.ops.asarray(self.ops.xp.concatenate(batch_vals),
                                      dtype="f")
        return (batch_keys, batch_vals, lengths), None


# Thinc v6 declarative model description (black-formatted variant of the
# same layer): on first data, run _set_dimensions_if_needed and then
# init_weights; attributes declares dimensions, parameters, and gradients.
@describe.on_data(_set_dimensions_if_needed,
                  lambda model, X, y: model.init_weights(model))
@describe.attributes(
    nI=Dimension("Input size"),
    nF=Dimension("Number of features"),
    nO=Dimension("Output size"),
    nP=Dimension("Maxout pieces"),
    # W has shape (nF, nO, nP, nI): one affine map per feature slot,
    # with nP maxout pieces per output unit.
    W=Synapses("Weights matrix", lambda obj: (obj.nF, obj.nO, obj.nP, obj.nI)),
    b=Biases("Bias vector", lambda obj: (obj.nO, obj.nP)),
    # pad is a learned padding vector, normal-initialized, broadcast over
    # the batch (leading dim 1).
    pad=Synapses(
        "Pad",
        lambda obj: (1, obj.nF, obj.nO, obj.nP),
        lambda M, ops: ops.normal_init(M, 1.0),
    ),
    d_W=Gradient("W"),
    d_pad=Gradient("pad"),
    d_b=Gradient("b"),
)
class PrecomputableAffine(Model):
    """Affine layer with per-feature weights and maxout pieces, described
    declaratively via thinc.describe (see decorators above)."""
    def __init__(self, nO=None, nI=None, nF=None, nP=None, **kwargs):
        """Store the (possibly still-unknown) dimensions; any that are None
        are filled in later by _set_dimensions_if_needed on first data."""
        Model.__init__(self, **kwargs)
        self.nO = nO  # output size
        self.nP = nP  # maxout pieces
        # NOTE(review): this snippet is cut off here — the parallel class
        # earlier in this file also assigns self.nI and self.nF; confirm
        # against the full original source.
Example No. 3
0
        arr[len(doc)] = 0
        seqs.append(arr)
    return seqs, None


from thinc import describe
from thinc.describe import Dimension, Synapses, Gradient
from thinc.neural._lsuv import LSUVinit


# LSUVinit: layer-sequential unit-variance initialization, run the first
# time the model sees data.
@describe.on_data(LSUVinit)
@describe.attributes(
    nM=Dimension("Vector dimensions"),
    nO=Dimension("Size of output"),
    # W has shape (nO, nM): projects vocab vectors from nM to nO dims,
    # Xavier-uniform initialized.
    W=Synapses("A projection matrix, to change vector dimensionality",
               lambda obj: (obj.nO, obj.nM),
               lambda W, ops: ops.xavier_uniform_init(W)),
    d_W=Gradient("W"),
)
class SpacyVectors(Model):
    """Thinc model wrapping a spaCy pipeline's word vectors, with a learned
    projection W from nM (= nlp.vocab.vectors_length) to nO dimensions.

    NOTE(review): truncated snippet — the class continues (a @property
    follows) in the original source.
    """
    # Forced onto CPU/numpy regardless of the global backend.
    ops = NumpyOps()
    name = 'spacy-vectors'

    def __init__(self, nlp, nO):
        Model.__init__(self)
        # presumably maps vocab ids to vector-table rows, with id 0
        # reserved for the zero row — verify against callers
        self._id_map = {0: 0}
        self.nO = nO  # output size of the projection
        self.nM = nlp.vocab.vectors_length  # input dim, taken from the vocab
        self.nlp = nlp  # keep the pipeline so vectors can be looked up later

    @property