def _get_sog_activation(self, x, s, configs, scope, name):
  assert isinstance(configs, (list, tuple)) and len(configs) > 0
  assert isinstance(self._gate_signals, list) and len(self._gate_signals) == 3
  net_u = self._hyper_neuron_16(x, s, self._gate_signals, scope)
  u = linker.softmax_over_groups(net_u, configs, name)
  return u

def _get_sog_activation(self, x, s, configs, scope, name):
  assert isinstance(configs, (list, tuple)) and len(configs) > 0
  net_u = self.dense_rn(x, s, scope)
  if th.sog_version == 0:
    u = linker.softmax_over_groups(net_u, configs, name)
  else:
    u = self._sog_v1(net_u)
  return u

def _get_coupled_gates(self, x, configs, reverse):
  assert isinstance(configs, (list, tuple))
  # T for transform, C for carry
  net_T = self.neurons(x, scope='net_u')
  if len(configs) == 0:
    T = tf.sigmoid(net_T, name='transform_gate')
  else:
    T = linker.softmax_over_groups(net_T, configs, 'transform_gate')
  C = tf.subtract(1., T, name='carry_gate')
  if len(configs) > 0 and reverse:
    T, C = C, T
  if hub.export_gates:
    self.tensors_to_export['carry_gate'] = C
  return T, C

def _get_sog_activation(self, x, s, configs, scope, name):
  assert isinstance(configs, (list, tuple)) and len(configs) > 0
  net_u = self.neurons(x, s, scope=scope)
  u = linker.softmax_over_groups(net_u, configs, name)
  return u
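

# The three `_get_sog_activation` variants above differ only in how the pre-gate
# activation `net_u` is produced; all of them gate it with a group-wise softmax.
# Below is a minimal, standalone sketch of that operation, assuming `configs`
# is a list of (group_size, num_groups) pairs. This is a hypothetical helper
# for illustration only; the exact signature and config format expected by
# `linker.softmax_over_groups` in tframe may differ.
import tensorflow as tf

def group_softmax(net_u, configs):
  """Apply softmax independently inside each group along the last axis."""
  outputs, offset = [], 0
  for group_size, num_groups in configs:
    width = group_size * num_groups
    block = net_u[..., offset:offset + width]                 # [batch, s * n]
    block = tf.reshape(block, [-1, num_groups, group_size])   # [batch, n, s]
    block = tf.nn.softmax(block, axis=-1)                     # softmax per group
    outputs.append(tf.reshape(block, [-1, width]))
    offset += width
  return tf.concat(outputs, axis=-1)

# Example: group_softmax(net_u, [(4, 8)]) treats a 32-unit activation as
# 8 groups of 4 units, so the gate values within each group sum to 1.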