Example #1
0
class RNN(CustomModel):
    """Recurrent classifier built around a single unrolled RNN submodel.

    Provides a softmax cross-entropy loss, categorical accuracy, a raw
    prediction pass, and an Adam optimizer. All metrics run the same
    forward pass through ``self.rnn``.
    """

    def define_layers(self, params):
        """Build the recurrent submodel and return the named-layer mapping.

        Expects ``params`` keys: 'output_dim', 'hidden_sizes',
        'recurrent_type'.
        """
        self.output_dim = params['output_dim']
        self.rnn = RNNUnrollSubmodel(model=self,
                                     gru_hidden_sizes=params['hidden_sizes'],
                                     std_hidden_sizes=[],
                                     n_out=params['output_dim'],
                                     recurrent_type=params['recurrent_type'])
        # Single-entry registry; return it directly — re-unpacking with
        # {**d} only produced a needless shallow copy.
        return {'bistablernn': self.rnn}

    @tf.function
    def cross_entropy(self, samples, **kwargs):
        """Per-sample softmax cross-entropy between logits and labels.

        Assumes ``samples['groundtruth']`` holds one-hot (or probability)
        targets matching the network's output shape — TODO confirm with
        the data pipeline.
        """
        observations = samples['groundtruth']
        out = self.rnn.forward(samples['input'])
        return {'cross-entropy': tf.nn.softmax_cross_entropy_with_logits(labels=observations, logits=out)}

    @tf.function
    def accuracy(self, samples):
        """Per-sample categorical accuracy of the forward pass."""
        observations = samples['groundtruth']
        out = self.rnn.forward(samples['input'])
        return {'accuracy': tf.metrics.categorical_accuracy(y_true=observations, y_pred=out)}

    def pred(self, samples):
        """Forward pass only; raw network output under key 'pred'."""
        out = self.rnn.forward(samples['input'])
        return {'pred': out}

    def create_optimizer(self, params):
        """Create an Adam optimizer with learning rate ``params['lr']``."""
        self.opt = tf.optimizers.Adam(learning_rate=params['lr'])
Example #2
0
 def define_layers(self, params):
     """Build the recurrent submodel and return the named-layer mapping.

     Expects ``params`` keys: 'output_dim', 'hidden_sizes',
     'recurrent_type'.
     """
     self.output_dim = params['output_dim']
     self.rnn = RNNUnrollSubmodel(model=self,
                                  gru_hidden_sizes=params['hidden_sizes'],
                                  std_hidden_sizes=[],
                                  n_out=params['output_dim'],
                                  recurrent_type=params['recurrent_type'])
     # Return the single-entry mapping directly; {**d} was just a
     # redundant shallow copy of a local dict.
     return {'bistablernn': self.rnn}
Example #3
0
class RNN(CustomModel):
    """Recurrent regression model wrapping one unrolled RNN submodel.

    Exposes an element-wise MSE metric, a raw prediction pass, and an
    Adam optimizer; every metric shares the same forward pass.
    """

    def define_layers(self, params):
        """Instantiate the unrolled RNN submodel and return the layer map.

        Reads ``params`` keys: 'output_dim', 'hidden_sizes',
        'recurrent_type'.
        """
        self.output_dim = params['output_dim']

        submodel = RNNUnrollSubmodel(model=self,
                                     gru_hidden_sizes=params['hidden_sizes'],
                                     std_hidden_sizes=[],
                                     n_out=params['output_dim'],
                                     recurrent_type=params['recurrent_type'])
        self.rnn = submodel

        std_layers = {'rnn': submodel}
        return {**std_layers}

    def mse(self, samples, **kwargs):
        """Element-wise squared error between target and network output."""
        target = samples['output']
        prediction = self.rnn.forward(samples['input'])
        return {'MSE': tf.square(target - prediction)}

    def pred(self, samples):
        """Forward pass only; raw network output under key 'pred'."""
        return {'pred': self.rnn.forward(samples['input'])}

    def create_optimizer(self, params):
        """Attach an Adam optimizer with learning rate ``params['lr']``."""
        self.opt = tf.optimizers.Adam(learning_rate=params['lr'])