Ejemplo n.º 1
0
    def __init__(self,
                 data_tree_root,
                 leaves,
                 model_spec_dict,
                 dummy_input,
                 min_distance=1e-7,
                 delta_loss_mode='l1'):
        """Mirror the data tree as a tree of submodels.

        Args:
            data_tree_root: root node of the data tree to replicate.
            leaves: ordered leaf data nodes; a leaf's position in this
                sequence becomes its training index.
            model_spec_dict: spec consumed by ``instantiate_submodel``
                (project-defined).
            dummy_input: input tensor kept for submodel instantiation.
            min_distance: lower bound on branch distances (presumably to
                guard against zero-length branches — confirm against usage).
            delta_loss_mode: identifier for the delta-loss variant.
        """
        # Removed leftover debug print of len(leaves).
        super(TreeModelMF, self).__init__()
        layers = []
        self.data_tree_root = data_tree_root
        self.leaves = leaves
        self.model_spec_dict = model_spec_dict
        self.dummy_input = dummy_input
        self.min_distance = min_distance
        self.delta_loss_mode = delta_loss_mode
        self.delta_tensor = tf.identity([0.0])
        self.root_model = self.instantiate_submodel()
        self.root_layer = MultifurcatingLayerNode(self.root_model)
        self.num_weight_tensors = len(self.root_model.weights)
        layers.append(self.root_layer)
        self.leaf_layers = list()
        # Replicate the data-tree topology into layer nodes (fills `layers`).
        self.copy_child_nodes(data_tree_root, self.root_layer, layers, leaves)

        # Expose every layer as an attribute so TF variable tracking (and
        # hence gradient computation) can find their weights.
        for i, layer_node in enumerate(layers):
            setattr(self, f'layer_{i}', layer_node.layer)
Ejemplo n.º 2
0
    def copy_child_nodes(self, data_node, layer_node, layers, leaves):
        """Recursively mirror ``data_node``'s subtree onto ``layer_node``.

        A leaf gets a fresh submodel and remembers the index of its
        training example; an internal node gets one new child layer per
        descendant, each appended to ``layers`` for later registration.
        """
        layer_node.height = data_node.height
        children = data_node.descendants
        if not children:
            # Leaf: find which training example this data node corresponds to.
            layer_node.is_leaf = True
            match_found = False
            for idx, leaf in enumerate(leaves):
                if leaf == data_node:
                    layer_node.train_index = idx
                    self.leaf_layers.append(layer_node)
                    match_found = True
            if not match_found:
                print('missing leaf match')
            layer_node.layer = self.instantiate_submodel()
        else:
            # Internal node: create a child layer per data child and recurse.
            for data_child in children:
                child = MultifurcatingLayerNode(layer=self.instantiate_submodel(),
                                                parent=layer_node)
                layer_node.descendants.append(child)
                layers.append(child)
                self.copy_child_nodes(data_child, child, layers, leaves)
Ejemplo n.º 3
0
    def __init__(self, data_tree_root, leaves, layer_shape, output_dim, num_dense_units, num_features, splice_sites,
                 weight_shapes, min_distance=1e-7):
        """Tree model whose leaves carry NN submodels.

        Args:
            data_tree_root: root node of the data tree to replicate.
            leaves: ordered leaf data nodes; position defines train index.
            layer_shape: shape of the flat AdditionLayer at each tree node.
            output_dim: output dimension of each leaf submodel.
            num_dense_units: dense-layer width of each leaf submodel.
            num_features: feature count; used to build the dummy input.
            splice_sites: stored as-is (semantics defined elsewhere).
            weight_shapes: stored as-is (semantics defined elsewhere).
            min_distance: lower bound on branch distances (presumably to
                guard against zero-length branches — confirm against usage).
        """
        # Removed leftover debug print of len(leaves).
        super(TreeModelNN, self).__init__()
        layers = []
        self.min_distance = min_distance
        self.layer_shape = layer_shape
        self.num_features = num_features
        self.addition_layer_0 = AdditionLayer(self.layer_shape)
        self.root_layer = MultifurcatingLayerNode(self.addition_layer_0)
        self.zero_vector = tf.zeros(layer_shape)
        self.delta_tensor = tf.identity([0.0])
        self.dummy_input = tf.zeros(shape=(1, num_features))
        self.output_dim = output_dim
        self.num_dense_units = num_dense_units
        self.splice_sites = splice_sites
        self.weight_shapes = weight_shapes
        # replicate the data-tree structure in layers
        self.copy_child_nodes(data_tree_root, self.root_layer, layers, leaves)

        # Register every layer as an attribute so TF tracks its variables,
        # build its weights, and additionally expose leaf submodels.
        for i, layer_node in enumerate(layers):
            setattr(self, str(i), layer_node.layer)
            layer_node.layer.build(self.layer_shape)
            if layer_node.is_leaf:
                setattr(self, str(i) + '_model', layer_node.model)
Ejemplo n.º 4
0
 def copy_child_nodes(self, data_node, layer_node, layers, leaves):
     """Recursively copy the data tree's topology into the layer tree."""
     layer_node.height = data_node.height
     children = data_node.descendants
     if not children:
         # Leaf: look up which training example this data node is.
         # (May be removable if leaf order is carefully preserved.)
         layer_node.is_leaf = True
         match_found = False
         for idx, leaf in enumerate(leaves):
             if leaf == data_node:
                 layer_node.train_index = idx
                 match_found = True
         if not match_found:
             print('missing leaf match')
         # NOTE: we could record the path of ancestor layers needed to fire
         # just this leaf, but firing the whole tree at once is cheaper per
         # training epoch and keeps the regularization error much simpler.
     else:
         # Internal node: add an AdditionLayer child per data child, recurse.
         for data_child in children:
             child = MultifurcatingLayerNode(layer=AdditionLayer(self.layer_shape), parent=layer_node)
             layer_node.descendants.append(child)
             layers.append(child)
             self.copy_child_nodes(data_child, child, layers, leaves)
Ejemplo n.º 5
0
 def __init__(self, data_tree_root, leaves, hidden_dims, layer_shape, min_distance = 1e-7):
     """Classification tree: mirror the data tree with AdditionLayer nodes."""
     super(FeedForwardClassificationTree, self).__init__(data_tree_root, leaves, hidden_dims, layer_shape, min_distance)
     tree_layers = list()
     self.min_distance = min_distance
     self.hidden_dims = hidden_dims
     # a flat layer holding the number of weights
     self.layer_shape = layer_shape
     self.addition_layer_0 = AdditionLayer(self.layer_shape)
     self.root_layer = MultifurcatingLayerNode(self.addition_layer_0)
     # clone the data-tree topology into layer nodes (fills tree_layers)
     self.copy_child_nodes(data_tree_root, self.root_layer, tree_layers, leaves)
     self.zero_vector = tf.zeros(layer_shape)
     self.delta_tensor = tf.identity([0.0])
     # register each layer as an attribute (for TF tracking) and build it
     for idx, node in enumerate(tree_layers):
         setattr(self, str(idx), node.layer)
         node.layer.build(self.layer_shape)
Ejemplo n.º 6
0
 def __init__(self, data_tree_root, leaves, layer_shape, min_distance = 1e-7):
     """Linear-regression tree model mirroring the data tree.

     Args:
         data_tree_root: root node of the data tree to replicate.
         leaves: ordered leaf data nodes; position defines train index.
         layer_shape: shape of the flat AdditionLayer at each tree node.
         min_distance: lower bound on branch distances (presumably to
             guard against zero-length branches — confirm against usage).
     """
     # Removed leftover debug print of len(leaves).
     # BUG FIX: forward the caller's min_distance instead of hard-coding
     # the default 1e-7 in the super() call.
     super(MultifurcatingTreeModelLinReg, self).__init__(data_tree_root, leaves, layer_shape, min_distance=min_distance)
     layers = []
     self.min_distance = min_distance
     self.layer_shape = layer_shape
     self.addition_layer_0 = AdditionLayer(self.layer_shape)
     self.root_layer = MultifurcatingLayerNode(self.addition_layer_0)
     # replicate the data-tree structure in layers
     self.copy_child_nodes(data_tree_root, self.root_layer, layers, leaves)
     self.zero_vector = tf.zeros(layer_shape)
     self.delta_tensor = tf.identity([0.0])
     # register every layer as an attribute so TF tracks its variables
     for i, layer_node in enumerate(layers):
         setattr(self, str(i), layer_node.layer)
         layer_node.layer.build(self.layer_shape)
Ejemplo n.º 7
0
 def __init__(self, data_tree_root, leaves, hidden_dims, layer_shape, min_distance = 1e-7, num_activations=7):
     """Entangled tree model with per-leaf weight/activation caches."""
     super(RandomEntangledModel, self).__init__(data_tree_root, leaves, layer_shape, min_distance)
     tree_layers = list()
     self.num_activations = num_activations
     num_leaves = len(leaves)
     # Non-trainable per-leaf buffers caching weights and activations.
     self.leaf_weights = tf.Variable([tf.zeros(shape=(layer_shape), dtype=tf.float32) for _ in range(num_leaves)], trainable=False)
     self.leaf_activations = tf.Variable([tf.zeros(shape=(self.num_activations), dtype=tf.float32) for _ in range(num_leaves)], trainable=False)
     self.min_distance = min_distance
     self.hidden_dims = hidden_dims
     # a flat layer holding the number of weights
     self.layer_shape = layer_shape
     self.addition_layer_0 = AdditionLayer(self.layer_shape)
     self.root_layer = MultifurcatingLayerNode(self.addition_layer_0)
     # clone the data-tree topology into layer nodes (fills tree_layers)
     self.copy_child_nodes(data_tree_root, self.root_layer, tree_layers, leaves)
     self.zero_vector = tf.zeros(layer_shape)
     self.delta_tensor = tf.identity([0.0])
     # register each layer as an attribute (for TF tracking) and build it
     for idx, node in enumerate(tree_layers):
         setattr(self, str(idx), node.layer)
         node.layer.build(self.layer_shape)
Ejemplo n.º 8
0
 def __init__(self, data_tree_root, leaves, layer_shape, min_distance = 1e-7, delta_loss_style='l1', delta_const_1=0.0, delta_const_2=0.001):
     """Delta-regularized entangled tree model.

     Args:
         data_tree_root: root node of the data tree to replicate.
         leaves: ordered leaf data nodes; position defines train index.
         layer_shape: shape of the flat AdditionLayer at each tree node.
         min_distance: lower bound on branch distances (presumably to
             guard against zero-length branches — confirm against usage).
         delta_loss_style: one of 'l1' or 'l1_indicator_approx'.
         delta_const_1: constant used by the delta loss (semantics elsewhere).
         delta_const_2: constant used by the delta loss (semantics elsewhere).

     Raises:
         ValueError: if delta_loss_style is not a supported mode.
     """
     super(EntangledDeltaTreeModel, self).__init__(data_tree_root, leaves, layer_shape, min_distance)
     layers = []
     # Validate with a real exception: an assert would be stripped under -O.
     if delta_loss_style not in ('l1', 'l1_indicator_approx'):
         raise ValueError(f"unsupported delta_loss_style: {delta_loss_style!r}")
     self.delta_const_1 = delta_const_1
     self.delta_const_2 = delta_const_2
     self.delta_loss_style = delta_loss_style
     # Non-trainable per-leaf buffers caching weights, biases, activations.
     self.leaf_weights = tf.Variable([tf.zeros(shape=(3, 3), dtype=tf.float32) for _ in range(len(leaves))], trainable=False)
     self.leaf_biases = tf.Variable([tf.zeros(shape=(3), dtype=tf.float32) for _ in range(len(leaves))], trainable=False)
     self.leaf_activations = tf.Variable([tf.zeros(shape=(1, 3), dtype=tf.float32) for _ in range(len(leaves))], trainable=False)
     self.min_distance = min_distance
     self.layer_shape = layer_shape
     self.addition_layer_0 = AdditionLayer(self.layer_shape)
     self.root_layer = MultifurcatingLayerNode(self.addition_layer_0)
     # replicate the data-tree structure in layers
     self.copy_child_nodes(data_tree_root, self.root_layer, layers, leaves)
     self.zero_vector = tf.zeros(layer_shape)
     self.delta_tensor = tf.identity([0.0])
     # register every layer as an attribute so TF tracks its variables
     for i, layer_node in enumerate(layers):
         setattr(self, str(i), layer_node.layer)
         layer_node.layer.build(self.layer_shape)
Ejemplo n.º 9
0
    def copy_child_nodes(self, data_node, layer_node, layers, leaves):
        """Recursively mirror the data subtree; each leaf gets an NN submodel."""
        layer_node.height = data_node.height
        children = data_node.descendants
        if not children:
            # Leaf: record the training-example index for this data node.
            layer_node.is_leaf = True
            match_found = False
            for idx, leaf in enumerate(leaves):
                if leaf == data_node:
                    layer_node.train_index = idx
                    match_found = True
            if not match_found:
                print('missing leaf match')
            # Build the NN submodel and run a dummy call so its initial
            # weights are created before training starts.
            net = NotTrainableFeedForwardSubModel(self.output_dim, self.num_dense_units, (1, self.num_features))
            net.call(self.dummy_input)
            layer_node.model = net
        else:
            # Internal node: one AdditionLayer child per data child, recurse.
            for data_child in children:
                child = MultifurcatingLayerNode(layer=AdditionLayer(self.layer_shape), parent=layer_node)
                layer_node.descendants.append(child)
                layers.append(child)
                self.copy_child_nodes(data_child, child, layers, leaves)