Example #1
    def create_model(self):
        
        # inputs: source token sequence, AST node sequence, and AST edge (adjacency) matrix
        tdat_input = Input(shape=(self.tdatlen,))
        smlnode_input = Input(shape=(self.smllen,))
        smledge_input = Input(shape=(self.smllen, self.smllen))
        
        # embedding for the source tokens; the same layer is reused for the AST nodes below
        tdel = Embedding(output_dim=self.embdims, input_dim=self.tdatvocabsize, mask_zero=False)
        tde = tdel(tdat_input)
        
        se = tdel(smlnode_input)

        # recurrent encoder over the embedded token sequence
        tenc = CuDNNGRU(self.recdims, return_state=True, return_sequences=True)
        tencout, tstate_h = tenc(tde)

        # propagate AST node vectors over the edge matrix for a fixed number of hops
        wrknodes = se
        for k in range(self.config['asthops']):
            astwork = OurCustomGraphLayer()([wrknodes, smledge_input])
            astwork = concatenate([astwork, wrknodes]) # combine the new node vectors with the previous iteration
            astwork = Dense(self.embdims)(astwork) # use a dense layer to squash back to proper dimension
            wrknodes = astwork

        # concatenate the token encoder outputs with the AST node vectors along the sequence axis,
        # then flatten and predict a word from the comment vocabulary
        context = concatenate([tencout, astwork], axis=1)

        out = Flatten()(context)
        out1 = Dense(self.comvocabsize, activation="softmax")(out)
        
        model = Model(inputs=[tdat_input, smlnode_input, smledge_input], outputs=out1)
        
        if self.config['multigpu']:
            model = keras.utils.multi_gpu_model(model, gpus=2)

        model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=0.001, clipnorm=20.), metrics=['accuracy'])
        return self.config, model
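
Both examples call OurCustomGraphLayer, which is not defined in these excerpts. Purely for orientation, the sketch below shows one way such a layer could implement a single hop of graph propagation, assuming it takes [nodes, edges] and lets each node aggregate its neighbors' vectors through a batched matrix product with the edge (adjacency) matrix; this behavior is an assumption, not the original implementation.

import keras.backend as K
from keras.layers import Layer

class OurCustomGraphLayer(Layer):
    # Hypothetical sketch: one hop of neighbor aggregation over the AST edge matrix.
    # inputs = [nodes, edges]; nodes: (batch, smllen, embdims), edges: (batch, smllen, smllen)
    def call(self, inputs):
        nodes, edges = inputs
        # each node becomes the (edge-weighted) sum of its neighbors' vectors
        return K.batch_dot(edges, nodes)

    def compute_output_shape(self, input_shape):
        # output shape matches the node matrix
        return input_shape[0]

In both models the layer is applied config['asthops'] times, and each hop's output is concatenated with the previous node vectors and projected back to embdims with a Dense layer.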
Example #2
    def create_model(self):

        tdat_input = Input(shape=(self.tdatlen, ))
        sdat_input = Input(shape=(self.sdatlen, self.config['stdatlen']))
        com_input = Input(shape=(self.comlen, ))
        smlnode_input = Input(shape=(self.smllen, ))
        smledge_input = Input(shape=(self.smllen, self.smllen))

        tdel = Embedding(output_dim=self.embdims,
                         input_dim=self.tdatvocabsize,
                         mask_zero=False)
        tde = tdel(tdat_input)

        # Adding file context information to the graph2seq model.
        # Note on embedding spaces: in this model, the embedding is shared
        # between tdats, sdats, and AST nodes.
        se = tdel(smlnode_input)

        tenc = CuDNNGRU(self.recdims, return_state=True, return_sequences=True)
        tencout, tstate_h = tenc(tde)

        de = Embedding(output_dim=self.embdims,
                       input_dim=self.comvocabsize,
                       mask_zero=False)(com_input)
        dec = CuDNNGRU(self.recdims, return_sequences=True)
        decout = dec(de, initial_state=tstate_h)

        tattn = dot([decout, tencout], axes=[2, 2])
        tattn = Activation('softmax')(tattn)
        tcontext = dot([tattn, tencout], axes=[2, 1])

        semb = TimeDistributed(tdel)
        sde = semb(sdat_input)

        senc = TimeDistributed(CuDNNGRU(int(self.recdims)))
        senc = senc(sde)

        sattn = dot([decout, senc], axes=[2, 2])
        sattn = Activation('softmax')(sattn)
        scontext = dot([sattn, senc], axes=[2, 1])

        wrknodes = se
        for k in range(self.config['asthops']):
            astwork = OurCustomGraphLayer()([wrknodes, smledge_input])
            astwork = concatenate([astwork, wrknodes])  # combine the new node vectors with the previous iteration
            astwork = Dense(self.embdims)(astwork)  # use a dense layer to squash back to proper dimension
            wrknodes = astwork

        #astwork = CuDNNGRU(self.recdims, return_sequences=True)(astwork, initial_state=tstate_h)

        # attend decoder words to nodes in ast
        aattn = dot([decout, astwork], axes=[2, 2])
        aattn = Activation('softmax')(aattn)
        acontext = dot([aattn, astwork], axes=[2, 1])

        # combine the three attention contexts with the decoder states
        context = concatenate([scontext, tcontext, decout, acontext])

        out = TimeDistributed(Dense(self.tdddims, activation="relu"))(context)

        out = Flatten()(out)
        out1 = Dense(self.comvocabsize, activation="softmax")(out)

        model = Model(inputs=[
            tdat_input, sdat_input, com_input, smlnode_input, smledge_input
        ],
                      outputs=out1)

        if self.config['multigpu']:
            model = keras.utils.multi_gpu_model(model, gpus=2)

        model.compile(loss='categorical_crossentropy',
                      optimizer=Adam(lr=0.001, clipnorm=20.),
                      metrics=['accuracy'])
        return self.config, model
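
The dot / Activation('softmax') / dot pattern in Example #2 is standard dot-product attention: each decoder step is scored against every encoder (or AST node) position, the scores are normalized with a softmax, and the encoder states are summed with those weights. The NumPy shape check below, using arbitrary placeholder sizes, makes the axes explicit.

import numpy as np

batch, comlen, tdatlen, recdims = 2, 13, 100, 256     # placeholder sizes, not from the config
decout = np.random.rand(batch, comlen, recdims)       # decoder states
tencout = np.random.rand(batch, tdatlen, recdims)     # encoder states

# dot([decout, tencout], axes=[2, 2]): score every decoder step against every encoder step
scores = np.einsum('bcr,btr->bct', decout, tencout)
# Activation('softmax'): normalize the scores over encoder positions
tattn = np.exp(scores) / np.exp(scores).sum(axis=-1, keepdims=True)
# dot([tattn, tencout], axes=[2, 1]): weighted sum of encoder states per decoder step
tcontext = np.einsum('bct,btr->bcr', tattn, tencout)
print(tcontext.shape)  # (2, 13, 256)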