Example no. 1
    def __init__(self, config, **kwargs):
        """Build the recurrent model: two context embedders (pre/post
        recurrence) around an LSTM, with MLP adapters on either side.
        """
        super(Model, self).__init__(**kwargs)
        self.config = config

        # Separate embedders for context consumed before vs. after the LSTM.
        self.pre_context_embedder = ContextEmbedder(
            config.pre_embedder, name='pre_context_embedder')
        self.post_context_embedder = ContextEmbedder(
            config.post_embedder, name='post_context_embedder')

        # Input adapter: two coordinates plus all pre-context embedding dims.
        rec_input_dim = 2 + sum(
            emb[2] for emb in config.pre_embedder.dim_embeddings)
        self.input_to_rec = MLP(activations=[Tanh()],
                                dims=[rec_input_dim, config.hidden_state_dim],
                                name='input_to_rec')

        self.rec = LSTM(dim=config.hidden_state_dim, name='recurrent')

        # Output adapter: hidden state plus post-context embeddings -> 2 values.
        out_input_dim = config.hidden_state_dim + sum(
            emb[2] for emb in config.post_embedder.dim_embeddings)
        self.rec_to_output = MLP(activations=[Tanh()],
                                 dims=[out_input_dim, 2],
                                 name='rec_to_output')

        self.sequences = ['latitude', 'latitude_mask', 'longitude']
        self.context = (self.pre_context_embedder.inputs
                        + self.post_context_embedder.inputs)
        self.inputs = self.sequences + self.context
        self.children = [
            self.pre_context_embedder, self.post_context_embedder,
            self.input_to_rec, self.rec, self.rec_to_output
        ]

        # Shared variables holding the LSTM's initial state and cell vectors.
        self.initial_state_ = shared_floatx_zeros((config.hidden_state_dim,),
                                                  name="initial_state")
        self.initial_cells = shared_floatx_zeros((config.hidden_state_dim,),
                                                 name="initial_cells")
Example no. 2
    def __init__(self, config, **kwargs):
        """Feed-forward model: a context embedder followed by an MLP with
        rectified hidden layers and a linear (Identity) output layer.
        """
        super(Model, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)

        hidden_activations = [Rectifier() for _ in config.dim_hidden]
        layer_dims = [config.dim_input] + config.dim_hidden + [config.dim_output]
        self.mlp = MLP(activations=hidden_activations + [Identity()],
                       dims=layer_dims)

        # NOTE: extremity inputs deliberately left out here.
        self.inputs = self.context_embedder.inputs
        self.children = [self.context_embedder, self.mlp]
Example no. 3
    def __init__(self, config, output_layer=None, **kwargs):
        """Feed-forward MLP over embedded context and trip extremities.

        Parameters
        ----------
        config : object
            Configuration exposing dim_input, dim_hidden and dim_output.
        output_layer : callable, optional
            Brick class used as the output activation. When None, the MLP
            ends at the last hidden layer (no output layer appended).
        """
        super(FFMLP, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)

        # Only append an output activation/dim when one was requested.
        output_activation = [] if output_layer is None else [output_layer()]
        output_dim = [] if output_layer is None else [config.dim_output]
        self.mlp = MLP(activations=[Rectifier() for _ in config.dim_hidden] + output_activation,
                       dims=[config.dim_input] + config.dim_hidden + output_dim)

        # Map input names like 'first_k_latitude' to their coordinate axis.
        self.extremities = {'%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis for side in ['first', 'last'] for axis in [0, 1]}
        # list(...) required on Python 3, where dict.keys() is a view object
        # and cannot be concatenated to a list with `+`.
        self.inputs = self.context_embedder.inputs + list(self.extremities.keys())
        self.children = [self.context_embedder, self.mlp]
Example no. 4
    def __init__(self, config, output_dim, activation, **kwargs):
        """MLP encoder: rectified hidden layers topped by a caller-supplied
        output activation, producing an `output_dim`-sized representation.
        """
        super(MLPEncoder, self).__init__(**kwargs)

        self.config = config
        self.context_embedder = ContextEmbedder(self.config)

        encoder_activations = ([Rectifier() for _ in config.dim_hidden]
                               + [activation()])
        encoder_dims = [config.dim_input] + config.dim_hidden + [output_dim]
        self.encoder_mlp = MLP(activations=encoder_activations,
                               dims=encoder_dims,
                               name='encoder')

        # Map input names like 'last_k_longitude' to their coordinate axis.
        self.extremities = {
            '%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis
            for side in ['first', 'last'] for axis in [0, 1]
        }

        self.children = [self.context_embedder, self.encoder_mlp]
Example no. 5
    def __init__(self, config, output_dim=2, **kwargs):
        """Bidirectional LSTM over coordinate sequences, decoded together
        with embedded context by a rectified MLP.
        """
        super(BidiRNN, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)

        # Optional custom recurrent activation; None keeps the LSTM default.
        act = config.rec_activation() if hasattr(config,
                                                 'rec_activation') else None
        self.rec = SegregatedBidirectional(
            LSTM(dim=config.hidden_state_dim, activation=act,
                 name='recurrent'))

        # Each fork feeds every LSTM input sequence except the mask.
        forked_sequences = [name
                            for name in self.rec.prototype.apply.sequences
                            if name != 'mask']
        self.fwd_fork = Fork(list(forked_sequences),
                             prototype=Linear(),
                             name='fwd_fork')
        self.bkwd_fork = Fork(list(forked_sequences),
                              prototype=Linear(),
                              name='bkwd_fork')

        # Decoder input: both directions' hidden states plus embedding dims.
        rto_in = config.hidden_state_dim * 2 + sum(
            x[2] for x in config.dim_embeddings)
        decoder_activations = ([Rectifier() for _ in config.dim_hidden]
                               + [Identity()])
        self.rec_to_output = MLP(
            activations=decoder_activations,
            dims=[rto_in] + config.dim_hidden + [output_dim])

        self.sequences = ['latitude', 'latitude_mask', 'longitude']
        self.inputs = self.sequences + self.context_embedder.inputs

        self.children = [
            self.context_embedder, self.fwd_fork, self.bkwd_fork, self.rec,
            self.rec_to_output
        ]
Example no. 6
    def __init__(self, config, **kwargs):
        """Prefix/candidate matching model: two MLP encoders project a trip
        prefix and each candidate to a shared representation space, scored
        with a softmax.

        Parameters
        ----------
        config : object
            Configuration exposing prefix_encoder/candidate_encoder sub-configs
            (dim_input, dim_hidden), representation_size and
            representation_activation.
        """
        super(Model, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)

        self.prefix_encoder = MLP(activations=[
            Rectifier() for _ in config.prefix_encoder.dim_hidden
        ] + [config.representation_activation()],
                                  dims=[config.prefix_encoder.dim_input] +
                                  config.prefix_encoder.dim_hidden +
                                  [config.representation_size],
                                  name='prefix_encoder')
        self.candidate_encoder = MLP(
            activations=[
                Rectifier() for _ in config.candidate_encoder.dim_hidden
            ] + [config.representation_activation()],
            dims=[config.candidate_encoder.dim_input] +
            config.candidate_encoder.dim_hidden + [config.representation_size],
            name='candidate_encoder')
        self.softmax = Softmax()

        # Map extremity input names (e.g. 'first_k_latitude') to their axis.
        self.prefix_extremities = {
            '%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis
            for side in ['first', 'last'] for axis in [0, 1]
        }
        self.candidate_extremities = {
            'candidate_%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis
            for side in ['first', 'last'] for axis in [0, 1]
        }

        # list(...) required on Python 3, where dict.keys() returns a view
        # that cannot be concatenated to a list with `+`.
        self.inputs = self.context_embedder.inputs + [
            'candidate_%s' % k for k in self.context_embedder.inputs
        ] + list(self.prefix_extremities.keys()) + list(
            self.candidate_extremities.keys())
        self.children = [
            self.context_embedder, self.prefix_encoder, self.candidate_encoder,
            self.softmax
        ]