Example 1
0
 def _setup_functions(self):
     """Build the activation/softmax callables, wire up the recursive
     output expression, and register saturation monitors on the top
     representation."""
     self._activation_func = nnprocessors.build_activation(self.activation)
     self._softmax_func = nnprocessors.build_activation('softmax')
     top_rep, self.output_func = self._recursive_func()
     magnitude = abs(top_rep)
     # Saturation monitors: percentage of units whose magnitude falls
     # below each threshold, plus the overall mean magnitude.
     for threshold in (0.1, 0.9):
         self.monitors.append(
             ("top_rep<%s" % threshold, 100 * (magnitude < threshold).mean()))
     self.monitors.append(("top_rep:mean", magnitude.mean()))
Example 2
0
 def _setup_functions(self):
     """Create the activation callables and the recurrent output graph,
     monitoring saturation of the final hidden state."""
     self._activation_func = nnprocessors.build_activation(self.activation)
     self._softmax_func = nnprocessors.build_activation('softmax')
     self.hidden_func, self.output_func = self._recurrent_func()
     # Fraction (as a percentage) of units in the last hidden state whose
     # magnitude is under 0.1 / 0.9 — a cheap saturation diagnostic.
     last_h = abs(self.hidden_func[-1])
     self.monitors.append(("h<0.1", 100 * (last_h < 0.1).mean()))
     self.monitors.append(("h<0.9", 100 * (last_h < 0.9).mean()))
Example 3
0
 def _setup_functions(self):
     """Set up activation functions and the recursive output expression.

     Also attaches monitors describing how saturated the top
     representation is (share of small/large magnitudes and the mean).
     """
     self._activation_func = nnprocessors.build_activation(self.activation)
     self._softmax_func = nnprocessors.build_activation('softmax')
     top_rep, self.output_func = self._recursive_func()
     abs_rep = abs(top_rep)
     self.monitors.append(("top_rep<0.1", 100 * (abs_rep < 0.1).mean()))
     self.monitors.append(("top_rep<0.9", 100 * (abs_rep < 0.9).mean()))
     self.monitors.append(("top_rep:mean", abs_rep.mean()))
Example 4
0
 def _setup_functions(self):
     """Initialise assistive parameters, activation callables, and the
     recursive output expression, with saturation monitors on top_rep."""
     self._assistive_params = []
     self._activation_func = nnprocessors.build_activation(self.activation)
     self._softmax_func = nnprocessors.build_activation('softmax')
     top_rep, self.output_func = self._recursive_func()
     # NOTE(review): prediction wiring via self._encode_func() is
     # currently disabled in the original source.
     magnitude = abs(top_rep)
     for bound in (0.1, 0.9):
         self.monitors.append(
             ("top_rep<%s" % bound, 100 * (magnitude < bound).mean()))
     self.monitors.append(("top_rep:mean", magnitude.mean()))
Example 5
0
File: rae.py Project: Satssuki/nlpy
 def _setup_functions(self):
     """Prepare the recursive autoencoder's symbolic functions.

     Builds the configured activation plus a softmax, obtains the top
     representation and output expression from ``_recursive_func``, and
     registers saturation monitors for the top representation.
     """
     self._assistive_params = []
     self._activation_func = nnprocessors.build_activation(self.activation)
     self._softmax_func = nnprocessors.build_activation('softmax')
     top_rep, self.output_func = self._recursive_func()
     # NOTE(review): the predict_func / _encode_func hookup was left
     # commented out upstream; it is intentionally not wired here.
     rep_mag = abs(top_rep)
     self.monitors.append(("top_rep<0.1", 100 * (rep_mag < 0.1).mean()))
     self.monitors.append(("top_rep<0.9", 100 * (rep_mag < 0.9).mean()))
     self.monitors.append(("top_rep:mean", rep_mag.mean()))
Example 6
0
    def _setup_functions(self):
        """Wire up the recurrent computation graph.

        Builds activation callables, the recurrent/prediction expressions,
        saturation monitors on the last hidden state, and the update pairs
        (including the optional hidden-state carry-over into ``h0``).
        """
        self._assistive_params = []
        self._activation_func = nnprocessors.build_activation(self.activation)
        self._softmax_func = nnprocessors.build_activation('softmax')
        # _recurrent_func yields the hidden-state sequence, the output
        # expression, and a dict-like set of updates (consumed via .items()).
        self.hidden_func, self.output_func, recurrent_updates = self._recurrent_func()
        self.predict_func, self.predict_updates = self._predict_func()
        # Saturation monitors: percentage of units in the final hidden state
        # whose magnitude is below 0.1 / 0.9.
        self.monitors.append(("hh<0.1", 100 * (abs(self.hidden_func[-1]) < 0.1).mean()))
        self.monitors.append(("hh<0.9", 100 * (abs(self.hidden_func[-1]) < 0.9).mean()))

        self.updates.extend(recurrent_updates.items())
        if self.update_h0:
            # Carry the last hidden state forward into h0, resetting to
            # init_h when the counter k hits 0.
            # NOTE(review): assumes self._vars.k[-1] marks a sequence
            # boundary — confirm against the training loop.
            self.updates.append((self.h0, ifelse(T.eq(self._vars.k[-1], 0), self.init_h, self.hidden_func[-1])))
        self.params.extend(self._assistive_params)
Example 7
0
    def _setup_functions(self):
        """Set up the LSTM-style recurrent graph.

        Builds tanh/sigmoid/softmax callables, the output/hidden/memory
        expressions, prediction function, saturation monitors, and the
        state carry-over updates for ``h0`` and ``c0``.
        """
        self._tanh = nnprocessors.build_activation('tanh')
        self._sigmoid = nnprocessors.build_activation('sigmoid')
        self._softmax = nnprocessors.build_activation('softmax')
        # _recurrent_func returns [output, hidden sequence, memory-cell
        # sequence] plus the recurrent updates (iterable of pairs here —
        # extended directly, unlike the dict variant elsewhere).
        [self.output_func, self.hidden_func, self.memory_func], recurrent_updates = self._recurrent_func()
        self.predict_func, self.predict_updates = self._predict_func()
        # Saturation monitors on the last hidden state and memory cell:
        # percentage of units with magnitude below 0.1 / 0.9.
        self.monitors.append(("last_h<0.1", 100 * (abs(self.hidden_func[-1]) < 0.1).mean()))
        self.monitors.append(("last_h<0.9", 100 * (abs(self.hidden_func[-1]) < 0.9).mean()))
        self.monitors.append(("c<0.1", 100 * (abs(self.memory_func[-1]) < 0.1).mean()))
        self.monitors.append(("c<0.9", 100 * (abs(self.memory_func[-1]) < 0.9).mean()))

        self.updates.extend(recurrent_updates)
        if self.update_h0:
            # Carry hidden and memory state across batches, resetting to
            # their init values when counter k wraps to 0.
            # NOTE(review): assumes self._vars.k[-1] flags sequence start.
            self.updates.append((self.h0, ifelse(T.eq(self._vars.k[-1], 0), self.init_h, self.hidden_func[-1])))
            self.updates.append((self.c0, ifelse(T.eq(self._vars.k[-1], 0), self.init_c, self.memory_func[-1])))
Example 8
0
    def _setup_functions(self):
        """Compose the layer's feed-forward expression: affine transform,
        activation, then noise/dropout applied to the result."""
        # Publish this layer's bias under the shared name if one is set.
        if self.shared_bias:
            self._vars.update_if_not_existing(self.shared_bias, self.B)
        bias = self._vars.get(self.shared_bias) if self.shared_bias else self.B
        if self.disable_bias:
            bias = 0

        self._activation_func = nnprocessors.build_activation(self.activation)
        self.preact_func = T.dot(self.x, self.W) + bias
        self.output_func = nnprocessors.add_noise(
            self._activation_func(self.preact_func), self.noise, self.dropouts)
Example 9
0
File: layer.py Project: zomux/nlpy
    def _setup_functions(self):
        """Build the layer's pre-activation and output expressions.

        Resolves the bias (shared, own, or disabled), then applies the
        configured activation and noise/dropout post-processing.
        """
        if self.shared_bias:
            # Register our B as the shared bias if none exists yet.
            self._vars.update_if_not_existing(self.shared_bias, self.B)
        if self.shared_bias:
            bias = self._vars.get(self.shared_bias)
        else:
            bias = self.B
        if self.disable_bias:
            bias = 0

        self._activation_func = nnprocessors.build_activation(self.activation)
        self.preact_func = T.dot(self.x, self.W) + bias
        activated = self._activation_func(self.preact_func)
        self.output_func = nnprocessors.add_noise(
            activated, self.noise, self.dropouts)
Example 10
0
File: conv.py Project: zomux/nlpy
 def _setup_functions(self):
     """Initialise assistive parameters, the activation callable, and
     the layer's output expression."""
     self._assistive_params = []
     self._activation_func = nnprocessors.build_activation(self.activation)
     # The heavy lifting (convolution graph) lives in _output_func.
     self.output_func = self._output_func()
Example 11
0
 def _setup_functions(self):
     """Prepare this layer: empty assistive-parameter list, configured
     activation, and the symbolic output expression."""
     self._assistive_params = []
     self._activation_func = nnprocessors.build_activation(self.activation)
     self.output_func = self._output_func()
Example 12
0
 def _setup_functions(self):
     """Build activation/softmax callables and the recurrent graph;
     attach saturation monitors on the last hidden state."""
     self._activation_func = nnprocessors.build_activation(self.activation)
     self._softmax_func = nnprocessors.build_activation('softmax')
     self.hidden_func, self.output_func = self._recurrent_func()
     final_state = abs(self.hidden_func[-1])
     # Percentage of final-hidden-state units under each magnitude bound.
     for bound in (0.1, 0.9):
         self.monitors.append(
             ("h<%s" % bound, 100 * (final_state < bound).mean()))