Beispiel #1
0
    def get_score(self, statement, hyps, f):
        """Score a (statement, hyps) pair.

        Encodes the parsed statement/hypotheses with the bidirectional GRU
        block, then pushes the concatenated encoding through the output MLP
        (dropout + ReLU-affine stack) and a final linear layer.

        Returns the graph node holding the scalar score.
        """
        (tokens, parents, left_sibs, right_sibs, params, depths,
         parent_arity, leaf_position, arity) = self.parse_statement_and_hyps(
            statement, hyps, f)

        # Per-token structural features consumed by the GRU block.
        structure = list(zip(depths, parent_arity, leaf_position, arity))
        encoded = self.gru_block(
            self.v.forward_start, tokens, params,
            hs_backward=self.v.backward_start,
            parents=parents,
            left_siblings=left_sibs,
            right_siblings=right_sibs,
            bidirectional=self.config.p.bidirectional,
            structure_data=structure,
            feed_to_attention=False)

        # Output head: concat -> first ReLU-affine -> hidden stack -> linear.
        out = nn.DropoutNode(nn.ConcatNode(encoded, self.g), self.dropout, self.g)
        out = nn.RELUDotAdd(out, self.v.main_first_W, self.v.main_first_b, self.g)
        out = nn.DropoutNode(out, self.dropout, self.g)
        for layer in range(self.config.p.out_layers):
            out = nn.RELUDotAdd(out, self.v.main_Ws[layer], self.v.main_bs[layer],
                                self.g)
            out = nn.DropoutNode(out, self.dropout, self.g)
        out = nn.DotNode(out, self.v.last_W, self.g)
        return nn.AddNode([out, self.v.last_b], self.g)
Beispiel #2
0
    def x_to_predictions(self, x):
        """Map a hidden vector to output-token logits.

        Applies dropout, a stack of ReLU-affine layers (out_Ws/out_bs),
        and a final ReLU-affine projection to logit space.
        """
        h = nn.DropoutNode(x, self.dropout, self.g)

        # Hidden stack: each layer is ReLU(W.h + b) followed by dropout.
        for weight, bias in zip(self.v.out_Ws, self.v.out_bs):
            h = nn.DropoutNode(nn.RELUDotAdd(h, weight, bias, self.g),
                               self.dropout, self.g)

        # Final projection to the logits vector.
        return nn.RELUDotAdd(h, self.v.last_W, self.v.last_b, self.g)
    def __init__(self, variables, config, tree, context, fit, this_ua, prop):
        """Inference-time model: encode the known/to-prove structures and
        prepare the initial hidden state for the output-side decoder.

        Training is hard-coded off, and the full graph is not retained
        (self.g is None).  The output is unknown at this point, so only the
        input side is processed here.
        """
        train = False

        DefaultModel.__init__(self, config, variables, train=train)
        self.g = None  # don't remember all of the stuff in the graph.

        # we don't know the output, so don't try to determine it
        self.parse_and_augment_proof_step(tree, context, fit, this_ua, prop)

        # merge the inputs together so that we can bidirection it
        in_string, in_parents, in_left, in_right, in_params, depths, \
                parent_arity, leaf_position, arity = merge_graph_structures(
                [self.known_graph_structure, self.to_prove_graph_structure],
                [self.v.known_gru_block, self.v.to_prove_gru_block])

        # do the left side gru blocks.
        # FIX: materialize zip() into a list -- on Python 3 a zip object is a
        # one-shot iterator, and the sibling gru_block call sites pass
        # list(zip(...)); keep this one consistent so structure_data survives
        # repeated iteration/indexing inside gru_block.
        to_middle = self.gru_block(
                self.v.forward_start, in_string, in_params,
                hs_backward=self.v.backward_start, parents=in_parents,
                left_siblings=in_left, right_siblings=in_right,
                bidirectional=self.config.p.bidirectional,
                structure_data=list(
                    zip(depths, parent_arity, leaf_position, arity)),
                feed_to_attention=self.config.p.attention)

        # set up the attentional model
        if self.config.p.attention:
            self.set_up_attention()

        # process the middle: one ReLU-affine transform per GRU layer output.
        from_middle = [nn.RELUDotAdd(x, W, b, self.g)
                for x, W, b in zip(to_middle, self.v.middle_W, self.v.middle_b)]

        self.initial_h = from_middle
Beispiel #4
0
    def get_vector(self, statement, hyps, f, statement_gru, hyps_gru,
            forward_start, backward_start, first_W, first_b, Ws, bs):
        """Encode (statement, hyps) with the given GRU parameters and run
        the concatenated encoding through an MLP.

        Unlike get_score, all weights are passed in explicitly so the same
        routine can be reused with different parameter sets.  Returns the
        final hidden-vector graph node.
        """
        in_string, in_parents, in_left, in_right, in_params, depths, \
                parent_arity, leaf_position, arity = self.parse_statement_and_hyps(
                statement, hyps, f, statement_gru, hyps_gru)

        # FIX: materialize zip() -- on Python 3 it is a single-pass iterator
        # and the sibling call sites pass list(zip(...)); stay consistent so
        # the structure data survives repeated iteration/indexing.
        to_middle = self.gru_block(forward_start, in_string, in_params,
                hs_backward=backward_start, parents=in_parents,
                left_siblings=in_left, right_siblings=in_right,
                bidirectional=self.config.p.bidirectional,
                structure_data=list(
                    zip(depths, parent_arity, leaf_position, arity)),
                feed_to_attention=False)

        # Output head: concat -> first ReLU-affine -> hidden stack, with
        # dropout after every layer.
        h = nn.ConcatNode(to_middle, self.g)
        h = nn.DropoutNode(h, self.dropout, self.g)
        h = nn.RELUDotAdd(h, first_W, first_b, self.g)
        h = nn.DropoutNode(h, self.dropout, self.g)
        for i in range(self.config.p.out_layers):
            h = nn.RELUDotAdd(h, Ws[i], bs[i], self.g)
            h = nn.DropoutNode(h, self.dropout, self.g)

        return h
Beispiel #5
0
    def __init__(self,
                 variables,
                 config,
                 proof_step,
                 train=False,
                 target_index=None):
        '''Full sequence model for one proof step.

        As a single pass, it processes the inputs (known and to-prove graph
        structures), decodes the output token sequence, computes the
        perplexity loss, and runs a backprop step if train.

        Side effects: sets self.correct_string, self.all_correct,
        self.num_correct, self.all_logits, self.prediction, self.loss,
        self.outputs, and self.output_counts.
        '''
        DefaultModel.__init__(self, config, variables, train=train)
        if not self.train:
            # Deterministic seed for evaluation runs, derived from the proof
            # step's identifying numbers.  NOTE(review): the stray unary '+'
            # before proof_step.prop.number is a no-op on ints -- looks like
            # a typo, harmless but worth confirming.
            np.random.seed(proof_step.context.number +
                           +proof_step.prop.number + proof_step.tree.size())

        self.parse_and_augment_proof_step(proof_step,
                                          target_index=target_index)

        # merge the inputs together so that we can bidirection it
        in_string, in_parents, in_left, in_right, in_params, depths, parent_arity, leaf_position, arity = merge_graph_structures(
            [self.known_graph_structure, self.to_prove_graph_structure],
            [self.v.known_gru_block, self.v.to_prove_gru_block])

        # print
        # print in_string
        # print in_parents
        # print in_left
        # print in_right
        # print depths
        # print parent_arity
        # print leaf_position
        # print arity

        # do the left side gru blocks (encoder over the merged input).
        to_middle = self.gru_block(self.v.forward_start,
                                   in_string,
                                   in_params,
                                   hs_backward=self.v.backward_start,
                                   parents=in_parents,
                                   left_siblings=in_left,
                                   right_siblings=in_right,
                                   bidirectional=self.config.p.bidirectional,
                                   structure_data=list(
                                       zip(depths, parent_arity, leaf_position,
                                           arity)),
                                   feed_to_attention=self.config.p.attention)

        # set up the attentional model
        if self.config.p.attention:
            self.set_up_attention()

        # process the middle: one ReLU-affine transform per encoder output,
        # producing the decoder's initial hidden states.
        from_middle = [
            nn.RELUDotAdd(x, W, b, self.g)
            for x, W, b in zip(to_middle, self.v.middle_W, self.v.middle_b)
        ]

        # process the right side: unpack the target (output) graph structure.
        out_string = self.out_graph_structure.string
        out_parents = self.out_graph_structure.parents
        out_left = self.out_graph_structure.left_sibling
        arity = self.out_graph_structure.arity
        leaf_position = self.out_graph_structure.leaf_position
        parent_arity = self.out_graph_structure.parent_arity
        depths = self.out_graph_structure.depth
        structure_data = list(zip(depths, parent_arity, leaf_position, arity))

        # Decode the output sequence one token at a time, threading the
        # hidden states forward and (optionally) feeding each token's parent
        # and left-sibling hidden states as augmentation.
        out_length = len(out_string)
        out_xs = []
        all_hs = []
        hs = from_middle
        for i in range(out_length):
            # figure out the augmentation stuff.
            if self.config.p.augmentation:
                # -1 marks "no parent"/"no left sibling"; fall back to the
                # learned placeholder states in that case.
                parent = out_parents[i]
                parent_hs = [x.no_parent for x in self.v.out_gru_block.aug
                             ] if parent == -1 else all_hs[parent]
                left = out_left[i]
                left_hs = [
                    x.no_left_sibling for x in self.v.out_gru_block.aug
                ] if left == -1 else all_hs[left]
            else:
                parent_hs = None
                left_hs = None

            hs, x = self.forward_vertical_slice(
                hs,
                parent_hs,
                left_hs,
                out_string[i],
                self.v.out_gru_block.forward,
                structure_data[i],
                takes_attention=self.config.p.attention)
            all_hs.append(hs)
            out_xs.append(x)

        # test

        # calculate logits and score.  The target at step i is the NEXT
        # token (teacher forcing), so shift by one and append the terminator.
        self.correct_string = out_string[1:] + ['END_OF_SECTION']
        #out_xs = [nn.ZerosNode([64], self.g) for token in correct_string]
        self.all_correct = True
        self.num_correct = 0
        self.all_logits = [self.x_to_predictions(x) for x in out_xs]
        self.prediction = []
        # NOTE(review): self.score presumably also updates num_correct /
        # all_correct / prediction as a side effect -- defined elsewhere.
        all_costs = [
            self.score(logits, c_token)
            for logits, c_token in zip(self.all_logits, self.correct_string)
        ]
        perplexity = nn.AddNode(all_costs, self.g)

        #self.logit_matrix = np.concat([l.value for l in all_logits])

        self.loss = nn.AddNode([perplexity, self.loss], self.g)

        # and train
        if train:
            # print 'training2'
            # print len(self.v.vs), len(self.g.nodes)
            self.g.backprop(self.loss)
            # for v in self.v.vs:
            #     print v.name, np.mean(v.value ** 2)
            #self.v.optimizer.minimize()

        # put the outputs in the standard training format:
        # per-metric totals paired with the counts used to normalize them.
        self.outputs = [
            perplexity.value, self.num_correct, 1 * self.all_correct
        ]
        self.output_counts = [out_length, out_length, 1]