Example #1
    def __init__(self, config, **kwargs):
        super(Model, self).__init__(**kwargs)
        self.config = config

        self.pre_context_embedder = ContextEmbedder(
            config.pre_embedder, name='pre_context_embedder')
        self.post_context_embedder = ContextEmbedder(
            config.post_embedder, name='post_context_embedder')

        in1 = 2 + sum(x[2] for x in config.pre_embedder.dim_embeddings)
        self.input_to_rec = MLP(activations=[Tanh()],
                                dims=[in1, config.hidden_state_dim],
                                name='input_to_rec')

        self.rec = LSTM(dim=config.hidden_state_dim, name='recurrent')

        in2 = config.hidden_state_dim + sum(
            x[2] for x in config.post_embedder.dim_embeddings)
        self.rec_to_output = MLP(activations=[Tanh()],
                                 dims=[in2, 2],
                                 name='rec_to_output')

        self.sequences = ['latitude', 'latitude_mask', 'longitude']
        self.context = self.pre_context_embedder.inputs + self.post_context_embedder.inputs
        self.inputs = self.sequences + self.context
        self.children = [
            self.pre_context_embedder, self.post_context_embedder,
            self.input_to_rec, self.rec, self.rec_to_output
        ]

        self.initial_state_ = shared_floatx_zeros((config.hidden_state_dim, ),
                                                  name="initial_state")
        self.initial_cells = shared_floatx_zeros((config.hidden_state_dim, ),
                                                 name="initial_cells")
Example #2
    def __init__(self, config, **kwargs):
        super(Model, self).__init__(**kwargs)
        self.config = config

        self.pre_context_embedder = ContextEmbedder(config.pre_embedder, name='pre_context_embedder')
        self.post_context_embedder = ContextEmbedder(config.post_embedder, name='post_context_embedder')

        in1 = 2 + sum(x[2] for x in config.pre_embedder.dim_embeddings)
        self.input_to_rec = MLP(activations=[Tanh()], dims=[in1, config.hidden_state_dim], name='input_to_rec')

        self.rec = LSTM(dim=config.hidden_state_dim, name='recurrent')

        in2 = config.hidden_state_dim + sum(x[2] for x in config.post_embedder.dim_embeddings)
        self.rec_to_output = MLP(activations=[Tanh()], dims=[in2, 2], name='rec_to_output')

        self.sequences = ['latitude', 'latitude_mask', 'longitude']
        self.context = self.pre_context_embedder.inputs + self.post_context_embedder.inputs
        self.inputs = self.sequences + self.context
        self.children = [ self.pre_context_embedder, self.post_context_embedder, self.input_to_rec, self.rec, self.rec_to_output ]

        self.initial_state_ = shared_floatx_zeros((config.hidden_state_dim,),
                name="initial_state")
        self.initial_cells = shared_floatx_zeros((config.hidden_state_dim,),
                name="initial_cells")
Example #3
    def __init__(self, config, **kwargs):
        super(Model, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)
        self.mlp = MLP(activations=[Rectifier()
                                    for _ in config.dim_hidden] + [Identity()],
                       dims=[config.dim_input] + config.dim_hidden +
                       [config.dim_output])

        self.inputs = self.context_embedder.inputs  # + self.extremities.keys()
        self.children = [self.context_embedder, self.mlp]
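The activations and dims lists passed to MLP must stay aligned: one activation per layer, with len(dims) == len(activations) + 1. A small standalone sketch with hypothetical sizes (not the project's real configuration):

from blocks.bricks import MLP, Rectifier, Identity

dim_input, dim_hidden, dim_output = 20, [200, 100], 2  # assumed sizes
mlp = MLP(activations=[Rectifier() for _ in dim_hidden] + [Identity()],
          dims=[dim_input] + dim_hidden + [dim_output])
# Layer widths: 20 -> 200 (ReLU) -> 100 (ReLU) -> 2 (linear output)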
Example #4
    def __init__(self, config, output_layer=None, **kwargs):
        super(FFMLP, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)

        output_activation = [] if output_layer is None else [output_layer()]
        output_dim = [] if output_layer is None else [config.dim_output]
        self.mlp = MLP(activations=[Rectifier() for _ in config.dim_hidden] + output_activation,
                       dims=[config.dim_input] + config.dim_hidden + output_dim)

        self.extremities = {'%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis for side in ['first', 'last'] for axis in [0, 1]}
        self.inputs = self.context_embedder.inputs + self.extremities.keys()
        self.children = [ self.context_embedder, self.mlp ]
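The extremities comprehension just builds a fixed mapping from input names such as first_k_latitude to the coordinate axis they carry; it can be checked in plain Python:

extremities = {'%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis
               for side in ['first', 'last'] for axis in [0, 1]}
print(sorted(extremities.items()))
# [('first_k_latitude', 0), ('first_k_longitude', 1),
#  ('last_k_latitude', 0), ('last_k_longitude', 1)]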
Example #5
class FFMLP(Initializable):
    def __init__(self, config, output_layer=None, **kwargs):
        super(FFMLP, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)

        output_activation = [] if output_layer is None else [output_layer()]
        output_dim = [] if output_layer is None else [config.dim_output]
        self.mlp = MLP(activations=[Rectifier() for _ in config.dim_hidden] + output_activation,
                       dims=[config.dim_input] + config.dim_hidden + output_dim)

        self.extremities = {'%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis for side in ['first', 'last'] for axis in [0, 1]}
        self.inputs = self.context_embedder.inputs + self.extremities.keys()
        self.children = [ self.context_embedder, self.mlp ]

    def _push_initialization_config(self):
        self.mlp.weights_init = self.config.mlp_weights_init
        self.mlp.biases_init = self.config.mlp_biases_init

    @application(outputs=['prediction'])
    def predict(self, **kwargs):
        embeddings = tuple(self.context_embedder.apply(**{k: kwargs[k] for k in self.context_embedder.inputs }))
        extremities = tuple((kwargs[k] - data.train_gps_mean[v]) / data.train_gps_std[v] for k, v in self.extremities.items())

        inputs = tensor.concatenate(extremities + embeddings, axis=1)
        outputs = self.mlp.apply(inputs)

        return outputs

    @predict.property('inputs')
    def predict_inputs(self):
        return self.inputs
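_push_initialization_config above expects the config to carry Blocks initialization schemes for the MLP. A hedged sketch of what such a config could look like (the attribute values are assumptions, using standard Blocks initializers):

from blocks.initialization import IsotropicGaussian, Constant

class Config(object):  # hypothetical stand-in for the project's config module
    mlp_weights_init = IsotropicGaussian(0.01)
    mlp_biases_init = Constant(0.0)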
Example #6
    def __init__(self, config, output_dim, activation, **kwargs):
        super(MLPEncoder, self).__init__(**kwargs)

        self.config = config
        self.context_embedder = ContextEmbedder(self.config)

        self.encoder_mlp = MLP(activations=[Rectifier() for _ in config.dim_hidden]
                                           + [activation()],
                               dims=[config.dim_input]
                                    + config.dim_hidden
                                    + [output_dim],
                               name='encoder')

        self.extremities = {'%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis 
                             for side in ['first', 'last'] for axis in [0, 1]}

        self.children = [ self.context_embedder,
                          self.encoder_mlp ]
Example #7
    def __init__(self, config, **kwargs):
        super(Model, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)
        self.mlp = MLP(activations=[Rectifier() for _ in config.dim_hidden] + [Identity()],
                       dims=[config.dim_input] + config.dim_hidden + [config.dim_output])

        self.inputs = self.context_embedder.inputs # + self.extremities.keys()
        self.children = [ self.context_embedder, self.mlp ]
Example #8
    def __init__(self, config, output_dim=2, **kwargs):
        super(BidiRNN, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)

        act = config.rec_activation() if hasattr(config,
                                                 'rec_activation') else None
        self.rec = SegregatedBidirectional(
            LSTM(dim=config.hidden_state_dim, activation=act,
                 name='recurrent'))

        self.fwd_fork = Fork([
            name
            for name in self.rec.prototype.apply.sequences if name != 'mask'
        ],
                             prototype=Linear(),
                             name='fwd_fork')
        self.bkwd_fork = Fork([
            name
            for name in self.rec.prototype.apply.sequences if name != 'mask'
        ],
                              prototype=Linear(),
                              name='bkwd_fork')

        rto_in = config.hidden_state_dim * 2 + sum(
            x[2] for x in config.dim_embeddings)
        self.rec_to_output = MLP(
            activations=[Rectifier()
                         for _ in config.dim_hidden] + [Identity()],
            dims=[rto_in] + config.dim_hidden + [output_dim])

        self.sequences = ['latitude', 'latitude_mask', 'longitude']
        self.inputs = self.sequences + self.context_embedder.inputs

        self.children = [
            self.context_embedder, self.fwd_fork, self.bkwd_fork, self.rec,
            self.rec_to_output
        ]
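The two Fork bricks create one linear transform per LSTM input sequence, skipping the mask. A short sketch (assuming standard Blocks behaviour) showing which output names that filter produces for a plain LSTM:

from blocks.bricks import Linear
from blocks.bricks.parallel import Fork
from blocks.bricks.recurrent import LSTM

lstm = LSTM(dim=32, name='recurrent')
fork = Fork([name for name in lstm.apply.sequences if name != 'mask'],
            prototype=Linear(), name='fwd_fork')
print(fork.output_names)  # ['inputs']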
Example #9
    def __init__(self, config, **kwargs):
        super(Model, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)

        self.prefix_encoder = MLP(activations=[
            Rectifier() for _ in config.prefix_encoder.dim_hidden
        ] + [config.representation_activation()],
                                  dims=[config.prefix_encoder.dim_input] +
                                  config.prefix_encoder.dim_hidden +
                                  [config.representation_size],
                                  name='prefix_encoder')
        self.candidate_encoder = MLP(
            activations=[
                Rectifier() for _ in config.candidate_encoder.dim_hidden
            ] + [config.representation_activation()],
            dims=[config.candidate_encoder.dim_input] +
            config.candidate_encoder.dim_hidden + [config.representation_size],
            name='candidate_encoder')
        self.softmax = Softmax()

        self.prefix_extremities = {
            '%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis
            for side in ['first', 'last'] for axis in [0, 1]
        }
        self.candidate_extremities = {
            'candidate_%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis
            for side in ['first', 'last'] for axis in [0, 1]
        }

        self.inputs = self.context_embedder.inputs + [
            'candidate_%s' % k for k in self.context_embedder.inputs
        ] + self.prefix_extremities.keys() + self.candidate_extremities.keys()
        self.children = [
            self.context_embedder, self.prefix_encoder, self.candidate_encoder,
            self.softmax
        ]
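The candidate side reuses the same embedder inputs, only prefixed with candidate_. A plain-Python sketch with hypothetical input names:

context_inputs = ['origin_call', 'day_of_week']  # hypothetical embedder inputs
candidate_inputs = ['candidate_%s' % k for k in context_inputs]
print(candidate_inputs)  # ['candidate_origin_call', 'candidate_day_of_week']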
Example #10
    def __init__(self, config, output_layer=None, **kwargs):
        super(FFMLP, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)

        output_activation = [] if output_layer is None else [output_layer()]
        output_dim = [] if output_layer is None else [config.dim_output]
        self.mlp = MLP(activations=[Rectifier() for _ in config.dim_hidden] + output_activation,
                       dims=[config.dim_input] + config.dim_hidden + output_dim)

        self.extremities = {'%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis for side in ['first', 'last'] for axis in [0, 1]}
        self.inputs = self.context_embedder.inputs + self.extremities.keys()
        self.children = [ self.context_embedder, self.mlp ]
Example #11
class Model(Initializable):
    def __init__(self, config, **kwargs):
        super(Model, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)
        self.mlp = MLP(activations=[Rectifier()
                                    for _ in config.dim_hidden] + [Identity()],
                       dims=[config.dim_input] + config.dim_hidden +
                       [config.dim_output])

        self.inputs = self.context_embedder.inputs  # + self.extremities.keys()
        self.children = [self.context_embedder, self.mlp]

    def _push_initialization_config(self):
        self.mlp.weights_init = self.config.mlp_weights_init
        self.mlp.biases_init = self.config.mlp_biases_init

    @application(outputs=['destination'])
    def predict(self, **kwargs):
        embeddings = tuple(
            self.context_embedder.apply(
                **{k: kwargs[k]
                   for k in self.context_embedder.inputs}))

        inputs = tensor.concatenate(embeddings, axis=1)
        outputs = self.mlp.apply(inputs)

        if self.config.output_mode == "destination":
            return data.train_gps_std * outputs + data.train_gps_mean
        elif self.config.output_mode == "clusters":
            return tensor.dot(outputs, self.classes)

    @predict.property('inputs')
    def predict_inputs(self):
        return self.inputs

    @application(outputs=['cost'])
    def cost(self, **kwargs):
        y_hat = self.predict(**kwargs)
        y = tensor.concatenate((kwargs['destination_latitude'][:, None],
                                kwargs['destination_longitude'][:, None]),
                               axis=1)

        return error.erdist(y_hat, y).mean()

    @cost.property('inputs')
    def cost_inputs(self):
        return self.inputs + ['destination_latitude', 'destination_longitude']
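In the "destination" branch the MLP output is in standardized GPS coordinates and is mapped back with std * output + mean. A NumPy sketch with made-up statistics:

import numpy as np

train_gps_mean = np.array([41.1573, -8.6159])    # hypothetical (lat, lon) mean
train_gps_std = np.array([0.07, 0.12])           # hypothetical std
outputs = np.array([[0.5, -1.0]])                # standardized network output
print(train_gps_std * outputs + train_gps_mean)  # back in GPS coordinates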
Example #12
class Model(Initializable):
    def __init__(self, config, **kwargs):
        super(Model, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)
        self.mlp = MLP(activations=[Rectifier() for _ in config.dim_hidden] + [Identity()],
                       dims=[config.dim_input] + config.dim_hidden + [config.dim_output])

        self.inputs = self.context_embedder.inputs # + self.extremities.keys()
        self.children = [ self.context_embedder, self.mlp ]

    def _push_initialization_config(self):
        self.mlp.weights_init = self.config.mlp_weights_init
        self.mlp.biases_init = self.config.mlp_biases_init

    @application(outputs=['destination'])
    def predict(self, **kwargs):
        embeddings = tuple(self.context_embedder.apply(**{k: kwargs[k] for k in self.context_embedder.inputs }))

        inputs = tensor.concatenate(embeddings, axis=1)
        outputs = self.mlp.apply(inputs)

        if self.config.output_mode == "destination":
            return data.train_gps_std * outputs + data.train_gps_mean
        elif self.config.output_mode == "clusters":
            return tensor.dot(outputs, self.classes)

    @predict.property('inputs')
    def predict_inputs(self):
        return self.inputs

    @application(outputs=['cost'])
    def cost(self, **kwargs):
        y_hat = self.predict(**kwargs)
        y = tensor.concatenate((kwargs['destination_latitude'][:, None],
                                kwargs['destination_longitude'][:, None]), axis=1)

        return error.erdist(y_hat, y).mean()

    @cost.property('inputs')
    def cost_inputs(self):
        return self.inputs + ['destination_latitude', 'destination_longitude']
Example #13
class MLPEncoder(Initializable):
    def __init__(self, config, output_dim, activation, **kwargs):
        super(MLPEncoder, self).__init__(**kwargs)

        self.config = config
        self.context_embedder = ContextEmbedder(self.config)

        self.encoder_mlp = MLP(activations=[Rectifier() for _ in config.dim_hidden]
                                           + [activation()],
                               dims=[config.dim_input]
                                    + config.dim_hidden
                                    + [output_dim],
                               name='encoder')

        self.extremities = {'%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis 
                             for side in ['first', 'last'] for axis in [0, 1]}

        self.children = [ self.context_embedder,
                          self.encoder_mlp ]

    def _push_initialization_config(self):
        for brick in [self.context_embedder, self.encoder_mlp]:
            brick.weights_init = self.config.weights_init
            brick.biases_init = self.config.biases_init

    @application
    def apply(self, **kwargs):
        embeddings = tuple(self.context_embedder.apply(
                           **{k: kwargs[k] for k in self.context_embedder.inputs }))
        extremities = tuple((kwargs[k] - data.train_gps_mean[v]) / data.train_gps_std[v]
                            for k, v in self.extremities.items())
        inputs = tensor.concatenate(extremities + embeddings, axis=1)

        return self.encoder_mlp.apply(inputs)

    @apply.property('inputs')
    def apply_inputs(self):
        return self.context_embedder.inputs + self.extremities.keys()
Example #14
    def __init__(self, config, output_dim=2, **kwargs):
        super(BidiRNN, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)
        
        act = config.rec_activation() if hasattr(config, 'rec_activation') else None
        self.rec = SegregatedBidirectional(LSTM(dim=config.hidden_state_dim, activation=act, name='recurrent'))

        self.fwd_fork = Fork([name for name in self.rec.prototype.apply.sequences if name!='mask'],
                             prototype=Linear(), name='fwd_fork')
        self.bkwd_fork = Fork([name for name in self.rec.prototype.apply.sequences if name!='mask'],
                              prototype=Linear(), name='bkwd_fork')

        rto_in = config.hidden_state_dim * 2 + sum(x[2] for x in config.dim_embeddings)
        self.rec_to_output = MLP(activations=[Rectifier() for _ in config.dim_hidden] + [Identity()], 
                                 dims=[rto_in] + config.dim_hidden + [output_dim])

        self.sequences = ['latitude', 'latitude_mask', 'longitude']
        self.inputs = self.sequences + self.context_embedder.inputs

        self.children = [ self.context_embedder, self.fwd_fork, self.bkwd_fork,
                          self.rec, self.rec_to_output ]
Example #15
    def __init__(self, config, output_dim, activation, **kwargs):
        super(RecurrentEncoder, self).__init__(**kwargs)

        self.config = config
        self.context_embedder = ContextEmbedder(config)

        self.rec = SegregatedBidirectional(LSTM(dim=config.rec_state_dim, name='encoder_recurrent'))

        self.fwd_fork = Fork([name for name in self.rec.prototype.apply.sequences if name!='mask'],
                             prototype=Linear(), name='fwd_fork')
        self.bkwd_fork = Fork([name for name in self.rec.prototype.apply.sequences if name!='mask'],
                              prototype=Linear(), name='bkwd_fork')

        rto_in = config.rec_state_dim * 2 + sum(x[2] for x in config.dim_embeddings)
        self.rec_to_output = MLP(
                    activations=[Rectifier() for _ in config.dim_hidden] + [activation],
                    dims=[rto_in] + config.dim_hidden + [output_dim],
                    name='encoder_rto')

        self.children = [self.context_embedder, self.rec, self.fwd_fork, self.bkwd_fork, self.rec_to_output]

        self.rec_inputs = ['latitude', 'longitude', 'latitude_mask']
        self.inputs = self.context_embedder.inputs + self.rec_inputs
Example #16
    def __init__(self, config, **kwargs):
        super(Model, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)

        self.prefix_encoder = MLP(
            activations=[Rectifier() for _ in config.prefix_encoder.dim_hidden] + [config.representation_activation()],
            dims=[config.prefix_encoder.dim_input] + config.prefix_encoder.dim_hidden + [config.representation_size],
            name="prefix_encoder",
        )
        self.candidate_encoder = MLP(
            activations=[Rectifier() for _ in config.candidate_encoder.dim_hidden]
            + [config.representation_activation()],
            dims=[config.candidate_encoder.dim_input]
            + config.candidate_encoder.dim_hidden
            + [config.representation_size],
            name="candidate_encoder",
        )
        self.softmax = Softmax()

        self.prefix_extremities = {
            "%s_k_%s" % (side, ["latitude", "longitude"][axis]): axis for side in ["first", "last"] for axis in [0, 1]
        }
        self.candidate_extremities = {
            "candidate_%s_k_%s" % (side, ["latitude", "longitude"][axis]): axis
            for side in ["first", "last"]
            for axis in [0, 1]
        }

        self.inputs = (
            self.context_embedder.inputs
            + ["candidate_%s" % k for k in self.context_embedder.inputs]
            + self.prefix_extremities.keys()
            + self.candidate_extremities.keys()
        )
        self.children = [self.context_embedder, self.prefix_encoder, self.candidate_encoder, self.softmax]
Example #17
class RNN(Initializable):
    @lazy()
    def __init__(self, config, rec_input_len=2, output_dim=2, **kwargs):
        super(RNN, self).__init__(**kwargs)
        self.config = config

        self.pre_context_embedder = ContextEmbedder(config.pre_embedder, name='pre_context_embedder')
        self.post_context_embedder = ContextEmbedder(config.post_embedder, name='post_context_embedder')

        in1 = rec_input_len + sum(x[2] for x in config.pre_embedder.dim_embeddings)
        self.input_to_rec = MLP(activations=[Tanh()], dims=[in1, config.hidden_state_dim], name='input_to_rec')

        self.rec = LSTM(dim=config.hidden_state_dim, name='recurrent')

        in2 = config.hidden_state_dim + sum(x[2] for x in config.post_embedder.dim_embeddings)
        self.rec_to_output = MLP(activations=[Tanh()], dims=[in2, output_dim], name='rec_to_output')

        self.sequences = ['latitude', 'latitude_mask', 'longitude']
        self.context = self.pre_context_embedder.inputs + self.post_context_embedder.inputs
        self.inputs = self.sequences + self.context
        self.children = [ self.pre_context_embedder, self.post_context_embedder, self.input_to_rec, self.rec, self.rec_to_output ]

        self.initial_state_ = shared_floatx_zeros((config.hidden_state_dim,),
                name="initial_state")
        self.initial_cells = shared_floatx_zeros((config.hidden_state_dim,),
                name="initial_cells")

    def _push_initialization_config(self):
        for mlp in [self.input_to_rec, self.rec_to_output]:
            mlp.weights_init = self.config.weights_init
            mlp.biases_init = self.config.biases_init
        self.rec.weights_init = self.config.weights_init

    def get_dim(self, name):
        return self.rec.get_dim(name)

    def process_rto(self, rto):
        return rto

    def rec_input(self, latitude, longitude, **kwargs):
        return (tensor.shape_padright(latitude), tensor.shape_padright(longitude))

    @recurrent(states=['states', 'cells'], outputs=['destination', 'states', 'cells'])
    def predict_all(self, **kwargs):
        pre_emb = tuple(self.pre_context_embedder.apply(**kwargs))

        itr_in = tensor.concatenate(pre_emb + self.rec_input(**kwargs), axis=1)
        itr = self.input_to_rec.apply(itr_in)
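        # Blocks' LSTM expects inputs of width 4 * dim (one slice per gate), so the
        # hidden_state_dim-wide projection is tiled 4 times along the feature axis.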
        itr = itr.repeat(4, axis=1)
        (next_states, next_cells) = self.rec.apply(itr, kwargs['states'], kwargs['cells'], mask=kwargs['latitude_mask'], iterate=False)

        post_emb = tuple(self.post_context_embedder.apply(**kwargs))
        rto = self.rec_to_output.apply(tensor.concatenate(post_emb + (next_states,), axis=1))

        rto = self.process_rto(rto)
        return (rto, next_states, next_cells)

    @predict_all.property('sequences')
    def predict_all_sequences(self):
        return self.sequences

    @application(outputs=predict_all.states)
    def initial_states(self, *args, **kwargs):
        return self.rec.initial_states(*args, **kwargs)

    @predict_all.property('contexts')
    def predict_all_context(self):
        return self.context

    def before_predict_all(self, kwargs):
        kwargs['latitude'] = (kwargs['latitude'].T - data.train_gps_mean[0]) / data.train_gps_std[0]
        kwargs['longitude'] = (kwargs['longitude'].T - data.train_gps_mean[1]) / data.train_gps_std[1]
        kwargs['latitude_mask'] = kwargs['latitude_mask'].T

    @application(outputs=['destination'])
    def predict(self, **kwargs):
        self.before_predict_all(kwargs)
        res = self.predict_all(**kwargs)[0]

        last_id = tensor.cast(kwargs['latitude_mask'].sum(axis=0) - 1, dtype='int64')
        return res[last_id, tensor.arange(kwargs['latitude_mask'].shape[1])]

    @predict.property('inputs')
    def predict_inputs(self):
        return self.inputs

    @application(outputs=['cost_matrix'])
    def cost_matrix(self, **kwargs):
        self.before_predict_all(kwargs)

        res = self.predict_all(**kwargs)[0]
        target = tensor.concatenate(
                    (kwargs['destination_latitude'].dimshuffle('x', 0, 'x'),
                     kwargs['destination_longitude'].dimshuffle('x', 0, 'x')),
                axis=2)
        target = target.repeat(kwargs['latitude'].shape[0], axis=0)
        ce = error.erdist(target.reshape((-1, 2)), res.reshape((-1, 2)))
        ce = ce.reshape(kwargs['latitude'].shape)
        return ce * kwargs['latitude_mask']

    @cost_matrix.property('inputs')
    def cost_matrix_inputs(self):
        return self.inputs + ['destination_latitude', 'destination_longitude']

    @application(outputs=['cost'])
    def cost(self, latitude_mask, **kwargs):
        return self.cost_matrix(latitude_mask=latitude_mask, **kwargs).sum() / latitude_mask.sum()

    @cost.property('inputs')
    def cost_inputs(self):
        return self.inputs + ['destination_latitude', 'destination_longitude']

    @application(outputs=['cost'])
    def valid_cost(self, **kwargs):
        last_id = tensor.cast(kwargs['latitude_mask'].sum(axis=1) - 1, dtype='int64')
        return self.cost_matrix(**kwargs)[last_id, tensor.arange(kwargs['latitude_mask'].shape[0])].mean()

    @valid_cost.property('inputs')
    def valid_cost_inputs(self):
        return self.inputs + ['destination_latitude', 'destination_longitude']
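predict picks, for every trajectory in the batch, the output at its last valid time step using the mask. A NumPy analogue with made-up data:

import numpy as np

mask = np.array([[1, 1], [1, 1], [0, 1]], dtype='float32')  # (time, batch)
res = np.arange(3 * 2 * 2).reshape(3, 2, 2)                 # (time, batch, 2)
last_id = (mask.sum(axis=0) - 1).astype('int64')            # [1, 2]
print(res[last_id, np.arange(mask.shape[1])])               # one row per trajectory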
Example #18
class BidiRNN(Initializable):
    @lazy()
    def __init__(self, config, output_dim=2, **kwargs):
        super(BidiRNN, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)

        act = config.rec_activation() if hasattr(config,
                                                 'rec_activation') else None
        self.rec = SegregatedBidirectional(
            LSTM(dim=config.hidden_state_dim, activation=act,
                 name='recurrent'))

        self.fwd_fork = Fork([
            name
            for name in self.rec.prototype.apply.sequences if name != 'mask'
        ],
                             prototype=Linear(),
                             name='fwd_fork')
        self.bkwd_fork = Fork([
            name
            for name in self.rec.prototype.apply.sequences if name != 'mask'
        ],
                              prototype=Linear(),
                              name='bkwd_fork')

        rto_in = config.hidden_state_dim * 2 + sum(
            x[2] for x in config.dim_embeddings)
        self.rec_to_output = MLP(
            activations=[Rectifier()
                         for _ in config.dim_hidden] + [Identity()],
            dims=[rto_in] + config.dim_hidden + [output_dim])

        self.sequences = ['latitude', 'latitude_mask', 'longitude']
        self.inputs = self.sequences + self.context_embedder.inputs

        self.children = [
            self.context_embedder, self.fwd_fork, self.bkwd_fork, self.rec,
            self.rec_to_output
        ]

    def _push_allocation_config(self):
        for i, fork in enumerate([self.fwd_fork, self.bkwd_fork]):
            fork.input_dim = 2
            fork.output_dims = [
                self.rec.children[i].get_dim(name)
                for name in fork.output_names
            ]

    def _push_initialization_config(self):
        for brick in [
                self.fwd_fork, self.bkwd_fork, self.rec, self.rec_to_output
        ]:
            brick.weights_init = self.config.weights_init
            brick.biases_init = self.config.biases_init

    def process_outputs(self, outputs):
        pass  # must be implemented in child class

    @application(outputs=['destination'])
    def predict(self, latitude, longitude, latitude_mask, **kwargs):
        latitude = (latitude.T -
                    data.train_gps_mean[0]) / data.train_gps_std[0]
        longitude = (longitude.T -
                     data.train_gps_mean[1]) / data.train_gps_std[1]
        latitude_mask = latitude_mask.T

        rec_in = tensor.concatenate(
            (latitude[:, :, None], longitude[:, :, None]), axis=2)

        last_id = tensor.cast(latitude_mask.sum(axis=0) - 1, dtype='int64')

        path = self.rec.apply(
            merge(self.fwd_fork.apply(rec_in, as_dict=True),
                  {'mask': latitude_mask}),
            merge(self.bkwd_fork.apply(rec_in, as_dict=True),
                  {'mask': latitude_mask}))[0]

        path_representation = (path[0][:, -self.config.hidden_state_dim:],
                               path[last_id - 1,
                                    tensor.arange(latitude_mask.shape[1])]
                               [:, :self.config.hidden_state_dim])

        embeddings = tuple(
            self.context_embedder.apply(
                **{k: kwargs[k]
                   for k in self.context_embedder.inputs}))

        inputs = tensor.concatenate(path_representation + embeddings, axis=1)
        outputs = self.rec_to_output.apply(inputs)

        return self.process_outputs(outputs)

    @predict.property('inputs')
    def predict_inputs(self):
        return self.inputs

    @application(outputs=['cost'])
    def cost(self, **kwargs):
        y_hat = self.predict(**kwargs)
        y = tensor.concatenate((kwargs['destination_latitude'][:, None],
                                kwargs['destination_longitude'][:, None]),
                               axis=1)

        return error.erdist(y_hat, y).mean()

    @cost.property('inputs')
    def cost_inputs(self):
        return self.inputs + ['destination_latitude', 'destination_longitude']
Example #19
class Model(Initializable):
    def __init__(self, config, **kwargs):
        super(Model, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)

        self.prefix_encoder = MLP(
            activations=[Rectifier() for _ in config.prefix_encoder.dim_hidden] + [config.representation_activation()],
            dims=[config.prefix_encoder.dim_input] + config.prefix_encoder.dim_hidden + [config.representation_size],
            name="prefix_encoder",
        )
        self.candidate_encoder = MLP(
            activations=[Rectifier() for _ in config.candidate_encoder.dim_hidden]
            + [config.representation_activation()],
            dims=[config.candidate_encoder.dim_input]
            + config.candidate_encoder.dim_hidden
            + [config.representation_size],
            name="candidate_encoder",
        )
        self.softmax = Softmax()

        self.prefix_extremities = {
            "%s_k_%s" % (side, ["latitude", "longitude"][axis]): axis for side in ["first", "last"] for axis in [0, 1]
        }
        self.candidate_extremities = {
            "candidate_%s_k_%s" % (side, ["latitude", "longitude"][axis]): axis
            for side in ["first", "last"]
            for axis in [0, 1]
        }

        self.inputs = (
            self.context_embedder.inputs
            + ["candidate_%s" % k for k in self.context_embedder.inputs]
            + self.prefix_extremities.keys()
            + self.candidate_extremities.keys()
        )
        self.children = [self.context_embedder, self.prefix_encoder, self.candidate_encoder, self.softmax]

    def _push_initialization_config(self):
        for (mlp, config) in [
            [self.prefix_encoder, self.config.prefix_encoder],
            [self.candidate_encoder, self.config.candidate_encoder],
        ]:
            mlp.weights_init = config.weights_init
            mlp.biases_init = config.biases_init

    @application(outputs=["destination"])
    def predict(self, **kwargs):
        prefix_embeddings = tuple(self.context_embedder.apply(**{k: kwargs[k] for k in self.context_embedder.inputs}))
        prefix_extremities = tuple(
            (kwargs[k] - data.train_gps_mean[v]) / data.train_gps_std[v] for k, v in self.prefix_extremities.items()
        )
        prefix_inputs = tensor.concatenate(prefix_extremities + prefix_embeddings, axis=1)
        prefix_representation = self.prefix_encoder.apply(prefix_inputs)
        if self.config.normalize_representation:
            prefix_representation = prefix_representation / tensor.sqrt(
                (prefix_representation ** 2).sum(axis=1, keepdims=True)
            )

        candidate_embeddings = tuple(
            self.context_embedder.apply(**{k: kwargs["candidate_%s" % k] for k in self.context_embedder.inputs})
        )
        candidate_extremities = tuple(
            (kwargs[k] - data.train_gps_mean[v]) / data.train_gps_std[v] for k, v in self.candidate_extremities.items()
        )
        candidate_inputs = tensor.concatenate(candidate_extremities + candidate_embeddings, axis=1)
        candidate_representation = self.candidate_encoder.apply(candidate_inputs)
        if self.config.normalize_representation:
            candidate_representation = candidate_representation / tensor.sqrt(
                (candidate_representation ** 2).sum(axis=1, keepdims=True)
            )

        similarity_score = tensor.dot(prefix_representation, candidate_representation.T)
        similarity = self.softmax.apply(similarity_score)

        candidate_destination = tensor.concatenate(
            (
                tensor.shape_padright(kwargs["candidate_last_k_latitude"][:, -1]),
                tensor.shape_padright(kwargs["candidate_last_k_longitude"][:, -1]),
            ),
            axis=1,
        )

        return tensor.dot(similarity, candidate_destination)

    @predict.property("inputs")
    def predict_inputs(self):
        return self.inputs

    @application(outputs=["cost"])
    def cost(self, **kwargs):
        y_hat = self.predict(**kwargs)
        y = tensor.concatenate(
            (kwargs["destination_latitude"][:, None], kwargs["destination_longitude"][:, None]), axis=1
        )

        return error.erdist(y_hat, y).mean()

    @cost.property("inputs")
    def cost_inputs(self):
        return self.inputs + ["destination_latitude", "destination_longitude"]
Example #20
class BidiRNN(Initializable):
    @lazy()
    def __init__(self, config, output_dim=2, **kwargs):
        super(BidiRNN, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)
        
        act = config.rec_activation() if hasattr(config, 'rec_activation') else None
        self.rec = SegregatedBidirectional(LSTM(dim=config.hidden_state_dim, activation=act, name='recurrent'))

        self.fwd_fork = Fork([name for name in self.rec.prototype.apply.sequences if name!='mask'],
                             prototype=Linear(), name='fwd_fork')
        self.bkwd_fork = Fork([name for name in self.rec.prototype.apply.sequences if name!='mask'],
                              prototype=Linear(), name='bkwd_fork')

        rto_in = config.hidden_state_dim * 2 + sum(x[2] for x in config.dim_embeddings)
        self.rec_to_output = MLP(activations=[Rectifier() for _ in config.dim_hidden] + [Identity()], 
                                 dims=[rto_in] + config.dim_hidden + [output_dim])

        self.sequences = ['latitude', 'latitude_mask', 'longitude']
        self.inputs = self.sequences + self.context_embedder.inputs

        self.children = [ self.context_embedder, self.fwd_fork, self.bkwd_fork,
                          self.rec, self.rec_to_output ]

    def _push_allocation_config(self):
        for i, fork in enumerate([self.fwd_fork, self.bkwd_fork]):
            fork.input_dim = 2
            fork.output_dims = [ self.rec.children[i].get_dim(name)
                                 for name in fork.output_names ]

    def _push_initialization_config(self):
        for brick in [self.fwd_fork, self.bkwd_fork, self.rec, self.rec_to_output]:
            brick.weights_init = self.config.weights_init
            brick.biases_init = self.config.biases_init

    def process_outputs(self, outputs):
        pass # must be implemented in child class

    @application(outputs=['destination'])
    def predict(self, latitude, longitude, latitude_mask, **kwargs):
        latitude = (latitude.T - data.train_gps_mean[0]) / data.train_gps_std[0]
        longitude = (longitude.T - data.train_gps_mean[1]) / data.train_gps_std[1]
        latitude_mask = latitude_mask.T

        rec_in = tensor.concatenate((latitude[:, :, None], longitude[:, :, None]), axis=2)

        last_id = tensor.cast(latitude_mask.sum(axis=0) - 1, dtype='int64')

        path = self.rec.apply(merge(self.fwd_fork.apply(rec_in, as_dict=True),
                                    {'mask': latitude_mask}),
                              merge(self.bkwd_fork.apply(rec_in, as_dict=True),
                                    {'mask': latitude_mask}))[0]

        path_representation = (path[0][:, -self.config.hidden_state_dim:],
                               path[last_id - 1, tensor.arange(latitude_mask.shape[1])]
                                   [:, :self.config.hidden_state_dim])

        embeddings = tuple(self.context_embedder.apply(
                        **{k: kwargs[k] for k in self.context_embedder.inputs }))

        inputs = tensor.concatenate(path_representation + embeddings, axis=1)
        outputs = self.rec_to_output.apply(inputs)

        return self.process_outputs(outputs)

    @predict.property('inputs')
    def predict_inputs(self):
        return self.inputs

    @application(outputs=['cost'])
    def cost(self, **kwargs):
        y_hat = self.predict(**kwargs)
        y = tensor.concatenate((kwargs['destination_latitude'][:, None],
                                kwargs['destination_longitude'][:, None]), axis=1)

        return error.erdist(y_hat, y).mean()

    @cost.property('inputs')
    def cost_inputs(self):
        return self.inputs + ['destination_latitude', 'destination_longitude']
Example #21
class Model(Initializable):
    def __init__(self, config, **kwargs):
        super(Model, self).__init__(**kwargs)
        self.config = config

        self.context_embedder = ContextEmbedder(config)

        self.prefix_encoder = MLP(activations=[
            Rectifier() for _ in config.prefix_encoder.dim_hidden
        ] + [config.representation_activation()],
                                  dims=[config.prefix_encoder.dim_input] +
                                  config.prefix_encoder.dim_hidden +
                                  [config.representation_size],
                                  name='prefix_encoder')
        self.candidate_encoder = MLP(
            activations=[
                Rectifier() for _ in config.candidate_encoder.dim_hidden
            ] + [config.representation_activation()],
            dims=[config.candidate_encoder.dim_input] +
            config.candidate_encoder.dim_hidden + [config.representation_size],
            name='candidate_encoder')
        self.softmax = Softmax()

        self.prefix_extremities = {
            '%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis
            for side in ['first', 'last'] for axis in [0, 1]
        }
        self.candidate_extremities = {
            'candidate_%s_k_%s' % (side, ['latitude', 'longitude'][axis]): axis
            for side in ['first', 'last'] for axis in [0, 1]
        }

        self.inputs = self.context_embedder.inputs + [
            'candidate_%s' % k for k in self.context_embedder.inputs
        ] + self.prefix_extremities.keys() + self.candidate_extremities.keys()
        self.children = [
            self.context_embedder, self.prefix_encoder, self.candidate_encoder,
            self.softmax
        ]

    def _push_initialization_config(self):
        for (mlp, config) in [[
                self.prefix_encoder, self.config.prefix_encoder
        ], [self.candidate_encoder, self.config.candidate_encoder]]:
            mlp.weights_init = config.weights_init
            mlp.biases_init = config.biases_init

    @application(outputs=['destination'])
    def predict(self, **kwargs):
        prefix_embeddings = tuple(
            self.context_embedder.apply(
                **{k: kwargs[k]
                   for k in self.context_embedder.inputs}))
        prefix_extremities = tuple(
            (kwargs[k] - data.train_gps_mean[v]) / data.train_gps_std[v]
            for k, v in self.prefix_extremities.items())
        prefix_inputs = tensor.concatenate(prefix_extremities +
                                           prefix_embeddings,
                                           axis=1)
        prefix_representation = self.prefix_encoder.apply(prefix_inputs)
        if self.config.normalize_representation:
            prefix_representation = prefix_representation / tensor.sqrt(
                (prefix_representation**2).sum(axis=1, keepdims=True))

        candidate_embeddings = tuple(
            self.context_embedder.apply(
                **{
                    k: kwargs['candidate_%s' % k]
                    for k in self.context_embedder.inputs
                }))
        candidate_extremities = tuple(
            (kwargs[k] - data.train_gps_mean[v]) / data.train_gps_std[v]
            for k, v in self.candidate_extremities.items())
        candidate_inputs = tensor.concatenate(candidate_extremities +
                                              candidate_embeddings,
                                              axis=1)
        candidate_representation = self.candidate_encoder.apply(
            candidate_inputs)
        if self.config.normalize_representation:
            candidate_representation = candidate_representation / tensor.sqrt(
                (candidate_representation**2).sum(axis=1, keepdims=True))

        similarity_score = tensor.dot(prefix_representation,
                                      candidate_representation.T)
        similarity = self.softmax.apply(similarity_score)

        candidate_destination = tensor.concatenate(
            (tensor.shape_padright(kwargs['candidate_last_k_latitude'][:, -1]),
             tensor.shape_padright(kwargs['candidate_last_k_longitude'][:,
                                                                        -1])),
            axis=1)

        return tensor.dot(similarity, candidate_destination)

    @predict.property('inputs')
    def predict_inputs(self):
        return self.inputs

    @application(outputs=['cost'])
    def cost(self, **kwargs):
        y_hat = self.predict(**kwargs)
        y = tensor.concatenate((kwargs['destination_latitude'][:, None],
                                kwargs['destination_longitude'][:, None]),
                               axis=1)

        return error.erdist(y_hat, y).mean()

    @cost.property('inputs')
    def cost_inputs(self):
        return self.inputs + ['destination_latitude', 'destination_longitude']
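The final prediction is a softmax-weighted average of the candidate trips' last known coordinates. A NumPy sketch with made-up numbers:

import numpy as np

similarity = np.array([[0.7, 0.2, 0.1]])            # (batch, n_candidates), rows sum to 1
candidate_destination = np.array([[41.15, -8.61],
                                  [41.16, -8.60],
                                  [41.14, -8.63]])  # (n_candidates, 2)
print(similarity.dot(candidate_destination))        # weighted (lat, lon) per batch row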
Example #22
class Model(Initializable):
    @lazy()
    def __init__(self, config, **kwargs):
        super(Model, self).__init__(**kwargs)
        self.config = config

        self.pre_context_embedder = ContextEmbedder(config.pre_embedder, name='pre_context_embedder')
        self.post_context_embedder = ContextEmbedder(config.post_embedder, name='post_context_embedder')

        in1 = 2 + sum(x[2] for x in config.pre_embedder.dim_embeddings)
        self.input_to_rec = MLP(activations=[Tanh()], dims=[in1, config.hidden_state_dim], name='input_to_rec')

        self.rec = LSTM(dim=config.hidden_state_dim, name='recurrent')

        in2 = config.hidden_state_dim + sum(x[2] for x in config.post_embedder.dim_embeddings)
        self.rec_to_output = MLP(activations=[Tanh()], dims=[in2, 2], name='rec_to_output')

        self.sequences = ['latitude', 'latitude_mask', 'longitude']
        self.context = self.pre_context_embedder.inputs + self.post_context_embedder.inputs
        self.inputs = self.sequences + self.context
        self.children = [ self.pre_context_embedder, self.post_context_embedder, self.input_to_rec, self.rec, self.rec_to_output ]

        self.initial_state_ = shared_floatx_zeros((config.hidden_state_dim,),
                name="initial_state")
        self.initial_cells = shared_floatx_zeros((config.hidden_state_dim,),
                name="initial_cells")

    def _push_initialization_config(self):
        for mlp in [self.input_to_rec, self.rec_to_output]:
            mlp.weights_init = self.config.weights_init
            mlp.biases_init = self.config.biases_init
        self.rec.weights_init = self.config.weights_init

    def get_dim(self, name):
        return self.rec.get_dim(name)

    @application
    def initial_state(self, *args, **kwargs):
        return self.rec.initial_state(*args, **kwargs)

    @recurrent(states=['states', 'cells'], outputs=['destination', 'states', 'cells'], sequences=['latitude', 'longitude', 'latitude_mask'])
    def predict_all(self, latitude, longitude, latitude_mask, **kwargs):
        latitude = (latitude - data.train_gps_mean[0]) / data.train_gps_std[0]
        longitude = (longitude - data.train_gps_mean[1]) / data.train_gps_std[1]

        pre_emb = tuple(self.pre_context_embedder.apply(**kwargs))
        latitude = tensor.shape_padright(latitude)
        longitude = tensor.shape_padright(longitude)
        itr = self.input_to_rec.apply(tensor.concatenate(pre_emb + (latitude, longitude), axis=1))
        itr = itr.repeat(4, axis=1)
        (next_states, next_cells) = self.rec.apply(itr, kwargs['states'], kwargs['cells'], mask=latitude_mask, iterate=False)

        post_emb = tuple(self.post_context_embedder.apply(**kwargs))
        rto = self.rec_to_output.apply(tensor.concatenate(post_emb + (next_states,), axis=1))

        rto = (rto * data.train_gps_std) + data.train_gps_mean
        return (rto, next_states, next_cells)

    @predict_all.property('contexts')
    def predict_all_inputs(self):
        return self.context

    @application(outputs=['destination'])
    def predict(self, latitude, longitude, latitude_mask, **kwargs):
        latitude = latitude.T
        longitude = longitude.T
        latitude_mask = latitude_mask.T
        res = self.predict_all(latitude, longitude, latitude_mask, **kwargs)[0]
        return res[-1]

    @predict.property('inputs')
    def predict_inputs(self):
        return self.inputs

    @application(outputs=['cost_matrix'])
    def cost_matrix(self, latitude, longitude, latitude_mask, **kwargs):
        latitude = latitude.T
        longitude = longitude.T
        latitude_mask = latitude_mask.T

        res = self.predict_all(latitude, longitude, latitude_mask, **kwargs)[0]
        target = tensor.concatenate(
                    (kwargs['destination_latitude'].dimshuffle('x', 0, 'x'),
                     kwargs['destination_longitude'].dimshuffle('x', 0, 'x')),
                axis=2)
        target = target.repeat(latitude.shape[0], axis=0)
        ce = error.erdist(target.reshape((-1, 2)), res.reshape((-1, 2)))
        ce = ce.reshape(latitude.shape)
        return ce * latitude_mask

    @cost_matrix.property('inputs')
    def cost_matrix_inputs(self):
        return self.inputs + ['destination_latitude', 'destination_longitude']

    @application(outputs=['cost'])
    def cost(self, latitude_mask, **kwargs):
        return self.cost_matrix(latitude_mask=latitude_mask, **kwargs).sum() / latitude_mask.sum()

    @cost.property('inputs')
    def cost_inputs(self):
        return self.inputs + ['destination_latitude', 'destination_longitude']

    @application(outputs=['cost'])
    def valid_cost(self, **kwargs):
        # Only works when batch_size is 1.
        return self.cost_matrix(**kwargs)[-1,0]

    @valid_cost.property('inputs')
    def valid_cost_inputs(self):
        return self.inputs + ['destination_latitude', 'destination_longitude']
Example #23
class RecurrentEncoder(Initializable):
    def __init__(self, config, output_dim, activation, **kwargs):
        super(RecurrentEncoder, self).__init__(**kwargs)

        self.config = config
        self.context_embedder = ContextEmbedder(config)

        self.rec = SegregatedBidirectional(LSTM(dim=config.rec_state_dim, name='encoder_recurrent'))

        self.fwd_fork = Fork([name for name in self.rec.prototype.apply.sequences if name!='mask'],
                             prototype=Linear(), name='fwd_fork')
        self.bkwd_fork = Fork([name for name in self.rec.prototype.apply.sequences if name!='mask'],
                              prototype=Linear(), name='bkwd_fork')

        rto_in = config.rec_state_dim * 2 + sum(x[2] for x in config.dim_embeddings)
        self.rec_to_output = MLP(
                    activations=[Rectifier() for _ in config.dim_hidden] + [activation],
                    dims=[rto_in] + config.dim_hidden + [output_dim],
                    name='encoder_rto')

        self.children = [self.context_embedder, self.rec, self.fwd_fork, self.bkwd_fork, self.rec_to_output]

        self.rec_inputs = ['latitude', 'longitude', 'latitude_mask']
        self.inputs = self.context_embedder.inputs + self.rec_inputs

    def _push_allocation_config(self):
        for i, fork in enumerate([self.fwd_fork, self.bkwd_fork]):
            fork.input_dim = 2
            fork.output_dims = [ self.rec.children[i].get_dim(name)
                                 for name in fork.output_names ]

    def _push_initialization_config(self):
        for brick in self.children:
            brick.weights_init = self.config.weights_init
            brick.biases_init = self.config.biases_init

    @application
    def apply(self, latitude, longitude, latitude_mask, **kwargs):
        latitude = (latitude.T - data.train_gps_mean[0]) / data.train_gps_std[0]
        longitude = (longitude.T - data.train_gps_mean[1]) / data.train_gps_std[1]
        latitude_mask = latitude_mask.T

        rec_in = tensor.concatenate((latitude[:, :, None], longitude[:, :, None]),
                                    axis=2)
        path = self.rec.apply(merge(self.fwd_fork.apply(rec_in, as_dict=True),
                                    {'mask': latitude_mask}),
                              merge(self.bkwd_fork.apply(rec_in, as_dict=True),
                                    {'mask': latitude_mask}))[0]

        last_id = tensor.cast(latitude_mask.sum(axis=0) - 1, dtype='int64')
        
        path_representation = (path[0][:, -self.config.rec_state_dim:],
                path[last_id - 1, tensor.arange(last_id.shape[0])]
                    [:, :self.config.rec_state_dim])

        embeddings = tuple(self.context_embedder.apply(
                            **{k: kwargs[k] for k in self.context_embedder.inputs }))

        inputs = tensor.concatenate(path_representation + embeddings, axis=1)
        outputs = self.rec_to_output.apply(inputs)

        return outputs

    @apply.property('inputs')
    def apply_inputs(self):
        return self.inputs
Example #24
class Model(Initializable):
    @lazy()
    def __init__(self, config, **kwargs):
        super(Model, self).__init__(**kwargs)
        self.config = config

        self.pre_context_embedder = ContextEmbedder(
            config.pre_embedder, name='pre_context_embedder')
        self.post_context_embedder = ContextEmbedder(
            config.post_embedder, name='post_context_embedder')

        in1 = 2 + sum(x[2] for x in config.pre_embedder.dim_embeddings)
        self.input_to_rec = MLP(activations=[Tanh()],
                                dims=[in1, config.hidden_state_dim],
                                name='input_to_rec')

        self.rec = LSTM(dim=config.hidden_state_dim, name='recurrent')

        in2 = config.hidden_state_dim + sum(
            x[2] for x in config.post_embedder.dim_embeddings)
        self.rec_to_output = MLP(activations=[Tanh()],
                                 dims=[in2, 2],
                                 name='rec_to_output')

        self.sequences = ['latitude', 'latitude_mask', 'longitude']
        self.context = self.pre_context_embedder.inputs + self.post_context_embedder.inputs
        self.inputs = self.sequences + self.context
        self.children = [
            self.pre_context_embedder, self.post_context_embedder,
            self.input_to_rec, self.rec, self.rec_to_output
        ]

        self.initial_state_ = shared_floatx_zeros((config.hidden_state_dim, ),
                                                  name="initial_state")
        self.initial_cells = shared_floatx_zeros((config.hidden_state_dim, ),
                                                 name="initial_cells")

    def _push_initialization_config(self):
        for mlp in [self.input_to_rec, self.rec_to_output]:
            mlp.weights_init = self.config.weights_init
            mlp.biases_init = self.config.biases_init
        self.rec.weights_init = self.config.weights_init

    def get_dim(self, name):
        return self.rec.get_dim(name)

    @application
    def initial_state(self, *args, **kwargs):
        return self.rec.initial_state(*args, **kwargs)

    @recurrent(states=['states', 'cells'],
               outputs=['destination', 'states', 'cells'],
               sequences=['latitude', 'longitude', 'latitude_mask'])
    def predict_all(self, latitude, longitude, latitude_mask, **kwargs):
        latitude = (latitude - data.train_gps_mean[0]) / data.train_gps_std[0]
        longitude = (longitude -
                     data.train_gps_mean[1]) / data.train_gps_std[1]

        pre_emb = tuple(self.pre_context_embedder.apply(**kwargs))
        latitude = tensor.shape_padright(latitude)
        longitude = tensor.shape_padright(longitude)
        itr = self.input_to_rec.apply(
            tensor.concatenate(pre_emb + (latitude, longitude), axis=1))
        itr = itr.repeat(4, axis=1)
        (next_states, next_cells) = self.rec.apply(itr,
                                                   kwargs['states'],
                                                   kwargs['cells'],
                                                   mask=latitude_mask,
                                                   iterate=False)

        post_emb = tuple(self.post_context_embedder.apply(**kwargs))
        rto = self.rec_to_output.apply(
            tensor.concatenate(post_emb + (next_states, ), axis=1))

        rto = (rto * data.train_gps_std) + data.train_gps_mean
        return (rto, next_states, next_cells)

    @predict_all.property('contexts')
    def predict_all_inputs(self):
        return self.context

    @application(outputs=['destination'])
    def predict(self, latitude, longitude, latitude_mask, **kwargs):
        latitude = latitude.T
        longitude = longitude.T
        latitude_mask = latitude_mask.T
        res = self.predict_all(latitude, longitude, latitude_mask, **kwargs)[0]
        return res[-1]

    @predict.property('inputs')
    def predict_inputs(self):
        return self.inputs

    @application(outputs=['cost_matrix'])
    def cost_matrix(self, latitude, longitude, latitude_mask, **kwargs):
        latitude = latitude.T
        longitude = longitude.T
        latitude_mask = latitude_mask.T

        res = self.predict_all(latitude, longitude, latitude_mask, **kwargs)[0]
        target = tensor.concatenate(
            (kwargs['destination_latitude'].dimshuffle('x', 0, 'x'),
             kwargs['destination_longitude'].dimshuffle('x', 0, 'x')),
            axis=2)
        target = target.repeat(latitude.shape[0], axis=0)
        ce = error.erdist(target.reshape((-1, 2)), res.reshape((-1, 2)))
        ce = ce.reshape(latitude.shape)
        return ce * latitude_mask

    @cost_matrix.property('inputs')
    def cost_matrix_inputs(self):
        return self.inputs + ['destination_latitude', 'destination_longitude']

    @application(outputs=['cost'])
    def cost(self, latitude_mask, **kwargs):
        return self.cost_matrix(latitude_mask=latitude_mask, **
                                kwargs).sum() / latitude_mask.sum()

    @cost.property('inputs')
    def cost_inputs(self):
        return self.inputs + ['destination_latitude', 'destination_longitude']

    @application(outputs=['cost'])
    def valid_cost(self, **kwargs):
        # Only works when batch_size is 1.
        return self.cost_matrix(**kwargs)[-1, 0]

    @valid_cost.property('inputs')
    def valid_cost_inputs(self):
        return self.inputs + ['destination_latitude', 'destination_longitude']