def get_layer_monitoring_channels(self, state_below=None, state=None,
                                  targets=None):
    """Collect the monitoring channels of every layer in the MLP.

    Forward-propagates `state_below` through `self.layers`, asking each
    layer for its monitoring channels on the way; the final layer also
    receives `targets` when one is supplied.  Each channel is stored
    under a key prefixed with the owning layer's name, and its
    `__doc__` records which layer produced it.

    Returns
    -------
    OrderedDict
        Mapping from '<layer_name>_<channel>' to the channel value.

    Raises
    ------
    TypeError
        If a layer returns anything other than an OrderedDict.
    """
    channels = OrderedDict()
    current = state_below
    last = self.layers[-1]
    for layer in self.layers:
        # Only the top-level layers are visited; the inner layers of a
        # composite layer are not traversed recursively.
        below = current
        current = layer.fprop(current)
        args = [below, current]
        if layer is last and targets is not None:
            args.append(targets)
        reported = layer.get_layer_monitoring_channels(*args)
        if not isinstance(reported, OrderedDict):
            raise TypeError(str((type(reported), layer.layer_name)))
        for key, value in reported.items():
            doc = get_monitor_doc(value)
            if doc is None:
                doc = (str(type(layer)) +
                       ".get_monitoring_channels_from_state did"
                       " not provide any further documentation for"
                       " this channel.")
            value.__doc__ = ('This channel came from a layer called "' +
                             layer.layer_name + '" of an MLP.\n' + doc)
            channels[layer.layer_name + '_' + key] = value
    return channels
def get_monitoring_channels(self, data):
    """Return the monitoring channels of the model evaluated on `data`.

    Collects the top layer's own monitoring channels (stored under keys
    prefixed with the layer's name) and adds flow-model statistics: the
    mean output, the mean log-determinant of the Jacobian, the mean
    log-prior, and the mean cumulative sum under the prior.

    Parameters
    ----------
    data : batch of model inputs (symbolic), fed to
        `get_fprop_and_log_det_jacobian`.

    Returns
    -------
    OrderedDict
        Mapping from channel name to (symbolic) channel value.
    """
    # NOTE: a commented-out copy of the full per-layer monitoring loop
    # and a redundant re-initialization of `rval` were removed here;
    # only the top layer's channels were ever collected.
    rval = OrderedDict()

    top_layer = self.layers[-1]
    ch = top_layer.get_layer_monitoring_channels()
    # .items() rather than the Python-2-only .iteritems(), so this also
    # runs under Python 3 (iteration behavior is identical).
    for key, value in ch.items():
        doc = get_monitor_doc(value)
        if doc is None:
            doc = str(type(top_layer)) + \
                ".get_monitoring_channels_from_state did" + \
                " not provide any further documentation for" + \
                " this channel."
        doc = 'This channel came from a layer called "' + \
            top_layer.layer_name + '" of an MLP.\n' + doc
        value.__doc__ = doc
        rval[top_layer.layer_name + '_' + key] = value

    # Flow statistics: Z is the transformed output, log_det_jac the
    # log-determinant of the transformation's Jacobian.
    Z, log_det_jac = self.get_fprop_and_log_det_jacobian(data)
    prior = self.log_p_z(Z)
    rval['ave_output'] = Z.mean()
    rval['ave_log_det_jac'] = log_det_jac.mean()
    rval['ave_prior'] = prior.mean()
    rval['cumulative_sum'] = \
        self.prior.get_cumulative(Z).sum(axis=1).mean(axis=0)
    return rval
def get_layer_monitoring_channels(self, state_below=None, state=None,
                                  targets=None):
    """Collect per-layer monitoring channels, honouring shortcut wiring.

    Forward-propagates `state_below` through `self.layers`.  When
    `self.x_shortcut` is set, the raw input is concatenated onto the
    state fed into every hidden layer; when `self.y_shortcut` is set,
    the activations of all hidden layers are accumulated and the final
    layer is fed that concatenation instead of the previous layer's
    output.  Channels are keyed '<layer_name>_<channel>'.  Returns an
    empty OrderedDict when `self.use_monitoring_channels` is falsy.

    Parameters
    ----------
    state_below : input batch to propagate (the `state` parameter is
        overwritten immediately and only kept for interface
        compatibility).
    targets : optional targets, passed to the final layer only.

    Returns
    -------
    OrderedDict mapping channel name to channel value.

    Raises
    ------
    TypeError
        If a layer returns anything other than an OrderedDict.
    """
    rval = OrderedDict()
    if self.use_monitoring_channels:
        state = state_below
        # x: the untouched network input, reused by the x-shortcut.
        x = state
        # state_conc: running concatenation of hidden-layer outputs,
        # consumed by the final layer when the y-shortcut is enabled.
        state_conc = None
        for layer in self.layers:
            # We don't go through all the inner layers recursively
            state_below = state
            if ((self.x_shortcut and layer is not self.layers[0]
                 and layer is not self.layers[-1])):
                # Hidden layers see the previous activation with the
                # raw input concatenated alongside it.
                state = self.create_shortcut_batch(state, x, 2, 1)
            if self.y_shortcut and layer is self.layers[-1]:
                # Final layer consumes the accumulated hidden states.
                state = layer.fprop(state_conc)
            else:
                state = layer.fprop(state)
            if self.y_shortcut and layer is not self.layers[-1]:
                if layer is self.layers[0]:
                    state_conc = state
                else:
                    state_conc = self.create_shortcut_batch(
                        state_conc, state, 2)
            args = [state_below, state]
            if layer is self.layers[-1] and targets is not None:
                args.append(targets)
            ch = layer.get_layer_monitoring_channels(*args)
            if not isinstance(ch, OrderedDict):
                raise TypeError(str((type(ch), layer.layer_name)))
            for key in ch:
                value = ch[key]
                doc = get_monitor_doc(value)
                if doc is None:
                    doc = str(type(layer)) + \
                        ".get_monitoring_channels_from_state did" + \
                        " not provide any further documentation for" + \
                        " this channel."
                doc = 'This channel came from a layer called "' + \
                    layer.layer_name + '" of an MLP.\n' + doc
                value.__doc__ = doc
                rval[layer.layer_name + '_' + key] = value
    return rval
def get_layer_monitoring_channels(self, state_below=None, state=None,
                                  targets=None):
    """Gather per-layer monitoring channels with shortcut handling.

    Propagates `state_below` forward through the layers, applying the
    x-shortcut (network input concatenated into each hidden layer's
    input) and the y-shortcut (all hidden activations concatenated and
    fed to the final layer) when those flags are enabled.  Each channel
    is stored under a key prefixed with its layer's name.  Returns an
    empty OrderedDict when monitoring is switched off.
    """
    channels = OrderedDict()
    if not self.use_monitoring_channels:
        return channels

    first = self.layers[0]
    last = self.layers[-1]
    current = state_below
    net_input = current      # raw input, reused by the x-shortcut
    hidden_concat = None     # running y-shortcut concatenation
    for layer in self.layers:
        # Only the top-level layers are visited; inner layers of a
        # composite layer are not traversed recursively.
        below = current
        if self.x_shortcut and layer is not first and layer is not last:
            # Hidden layers also see the raw network input.
            current = self.create_shortcut_batch(current, net_input, 2, 1)
        if self.y_shortcut and layer is last:
            # The final layer consumes the accumulated hidden states.
            current = layer.fprop(hidden_concat)
        else:
            current = layer.fprop(current)
        if self.y_shortcut and layer is not last:
            if layer is first:
                hidden_concat = current
            else:
                hidden_concat = self.create_shortcut_batch(hidden_concat,
                                                           current, 2)
        args = [below, current]
        if layer is last and targets is not None:
            args.append(targets)
        reported = layer.get_layer_monitoring_channels(*args)
        if not isinstance(reported, OrderedDict):
            raise TypeError(str((type(reported), layer.layer_name)))
        for key, value in reported.items():
            doc = get_monitor_doc(value)
            if doc is None:
                doc = (str(type(layer)) +
                       ".get_monitoring_channels_from_state did"
                       " not provide any further documentation for"
                       " this channel.")
            value.__doc__ = ('This channel came from a layer called "' +
                             layer.layer_name + '" of an MLP.\n' + doc)
            channels[layer.layer_name + '_' + key] = value
    return channels