def test_subscribe_lazy_prop_change(self):
    """A callback subscribed to a lazy prop fires when the caches are cleared."""
    instance = _PropClass()
    notifications = []
    subscribe_to_lazy_prop(instance, 'lazyprop',
                           lambda _: notifications.append(1))
    clear_all_lazyprops(instance)
    # exactly one notification for the single clear
    self.assertEqual(notifications, [1])
def test_clear_all(self):
    """clear_all_lazyprops forces cached lazy property values to be recomputed.

    The first access caches STATIC_VAL; a later mutation is invisible
    until the caches are cleared.
    """
    prop_class = _PropClass()
    prop_class.STATIC_VAL = 1
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(prop_class.lazyprop, 1)
    prop_class.STATIC_VAL = 2
    # still cached: the lazy prop does not see the new value yet
    self.assertEqual(prop_class.lazyprop, 1)
    clear_all_lazyprops(prop_class)
    # cache cleared: recomputed from the updated STATIC_VAL
    self.assertEqual(prop_class.lazyprop, 2)
def detach_output(self):
    """Detaches the connect between this layer and the next layer

    Returns:
        BaseLayer : The next layer, now detached from this layer

    Raises:
        ValueError: If there is no next layer to detach.
    """
    # Guard added for consistency with the documented variant of this method:
    # without it, a missing next layer surfaces as an opaque AttributeError.
    if self._next_layer is None:
        raise ValueError("Cannot detach_output if there is no next layer")
    next_layer = self._next_layer
    next_layer._input_layer = None
    # both sides cache properties derived from the connection; invalidate them
    clear_all_lazyprops(next_layer)
    self._next_layer = None
    clear_all_lazyprops(self)
    return next_layer
def test_unsubscribe_lazy_prop_change(self):
    """After unsubscribing, a callback no longer fires on cache clears."""
    instance = _PropClass()
    calls = []

    def on_change(_):
        calls.append(1)

    subscribe_to_lazy_prop(instance, 'lazyprop', on_change)
    clear_all_lazyprops(instance)
    self.assertEqual(len(calls), 1)
    unsubscribe_from_lazy_prop(instance, 'lazyprop', on_change)
    clear_all_lazyprops(instance)
    # count unchanged: the second clear must not notify the removed callback
    self.assertEqual(len(calls), 1)
def detach_output(self):
    """Detaches the connect between this layer and the next layer

    Returns:
        BaseLayer : The next layer, now detached from this layer

    Raises:
        ValueError: If no next layer is attached.
    """
    detached = self._next_layer
    if detached is None:
        raise ValueError("Cannot detach_output if there is no next layer")
    # sever both directions of the link
    detached._input_layer = None
    self._next_layer = None
    # cached properties on both layers depend on the connection; drop them
    clear_all_lazyprops(detached)
    clear_all_lazyprops(self)
    return detached
def test_clear_all(self):
    """clear_all_lazyprops invalidates every cached lazy property on an instance."""
    class PropClass():
        STATIC_VAL = None

        @lazyprop
        def lazyprop(self):
            return self.STATIC_VAL

    prop_class = PropClass()
    prop_class.STATIC_VAL = 1
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(prop_class.lazyprop, 1)
    prop_class.STATIC_VAL = 2
    # value is cached from the first access, so the mutation is not seen yet
    self.assertEqual(prop_class.lazyprop, 1)
    clear_all_lazyprops(prop_class)
    # cleared: recomputed from the new STATIC_VAL
    self.assertEqual(prop_class.lazyprop, 2)
def resize(self, new_output_nodes=None, output_nodes_to_prune=None, input_nodes_to_prune=None,
           split_output_nodes=None, split_input_nodes=None, split_nodes_noise_std=.1):
    """Recompute this layer's flattened output shape from its input layer.

    The output is a 1-tuple holding the product of the input layer's
    output dimensions. If that changed, all cached properties on this
    layer and every downstream layer are invalidated, and the resize is
    propagated to the next layer when it reports one is needed.
    """
    flattened = functools.reduce(operator.mul, self.input_layer.output_nodes)
    target_shape = (flattened,)
    if self.output_nodes != target_shape:
        self._output_nodes = target_shape
        # can't resize the tf.reshape so just regen everything
        clear_all_lazyprops(self)
        for downstream in self.downstream_layers:
            clear_all_lazyprops(downstream)
    if self.next_layer is not None and self.next_layer._resize_needed():
        # TODO: D.S make sure resize is consistant, i.e new nodes are not just created on the end...
        # Must do this at some point
        self._next_layer.resize(input_nodes_to_prune=output_nodes_to_prune,
                                split_input_nodes=split_output_nodes)
def resize(self, new_output_nodes=None, output_nodes_to_prune=None, input_nodes_to_prune=None,
           split_output_nodes=None, split_input_nodes=None, split_nodes_noise_std=.1):
    """Recompute this layer's output shape from its input layer and strides.

    Uses _calculate_output_nodes to derive the shape; if it changed, all
    cached properties on this layer and every downstream layer are
    invalidated, and the resize is propagated to the next layer when it
    reports one is needed.
    """
    recomputed = self._calculate_output_nodes(self.input_layer, self._strides)
    if self.output_nodes != recomputed:
        self._output_nodes = recomputed
        # shape changed: regenerate all derived cached state downstream
        clear_all_lazyprops(self)
        for downstream in self.downstream_layers:
            clear_all_lazyprops(downstream)
    if self.next_layer is not None and self.next_layer._resize_needed():
        # TODO: D.S make sure resize is consistant, i.e new nodes are not just created on the end...
        # Must do this at some point
        self._next_layer.resize(input_nodes_to_prune=output_nodes_to_prune,
                                split_input_nodes=split_output_nodes)